From 174b9a482f61aff561385732542a17cca6aff8e8 Mon Sep 17 00:00:00 2001
From: indraneelSLU
Date: Wed, 10 Nov 2021 05:45:31 -0600
Subject: [PATCH 1/5] setting heroku

---
 Procfile         |  1 +
 requirements.txt | 17 +++++++----------
 2 files changed, 8 insertions(+), 10 deletions(-)
 create mode 100644 Procfile

diff --git a/Procfile b/Procfile
new file mode 100644
index 0000000..ca6e941
--- /dev/null
+++ b/Procfile
@@ -0,0 +1 @@
+web: gunicorn app:app
diff --git a/requirements.txt b/requirements.txt
index b3453a2..c950cc6 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,10 +1,7 @@
-appdirs==1.4.3
-click==6.7
-Flask==0.12
-itsdangerous==0.24
-Jinja2==2.9.5
-MarkupSafe==1.0
-packaging==16.8
-pyparsing==2.2.0
-six==1.10.0
-Werkzeug==0.11.15
+click==8.0.3
+Flask==2.0.2
+gunicorn==20.1.0
+itsdangerous==2.0.1
+Jinja2==3.0.3
+MarkupSafe==2.0.1
+Werkzeug==2.0.2

From 3396097dfcedf7f8bdd0b2628501afbff26398f0 Mon Sep 17 00:00:00 2001
From: indraneelSLU
Date: Wed, 10 Nov 2021 05:47:41 -0600
Subject: [PATCH 2/5] environment and Procfile

---
 Procfile | 2 +-
 venv/bin/Activate.ps1 | 241 +
 venv/bin/activate | 76 +
 venv/bin/activate.csh | 37 +
 venv/bin/activate.fish | 75 +
 venv/bin/easy_install | 8 +
 venv/bin/easy_install-3.8 | 8 +
 venv/bin/flask | 8 +
 venv/bin/gunicorn | 8 +
 venv/bin/pip | 8 +
 venv/bin/pip3 | 8 +
 venv/bin/pip3.8 | 8 +
 venv/bin/python | 1 +
 venv/bin/python3 | 1 +
 .../Flask-2.0.2.dist-info/INSTALLER | 1 +
 .../Flask-2.0.2.dist-info/LICENSE.rst | 28 +
 .../Flask-2.0.2.dist-info/METADATA | 125 +
 .../Flask-2.0.2.dist-info/RECORD | 51 +
 .../site-packages/Flask-2.0.2.dist-info/WHEEL | 5 +
 .../Flask-2.0.2.dist-info/entry_points.txt | 3 +
 .../Flask-2.0.2.dist-info/top_level.txt | 1 +
 .../Jinja2-3.0.3.dist-info/INSTALLER | 1 +
 .../Jinja2-3.0.3.dist-info/LICENSE.rst | 28 +
 .../Jinja2-3.0.3.dist-info/METADATA | 113 +
 .../Jinja2-3.0.3.dist-info/RECORD | 58 +
 .../Jinja2-3.0.3.dist-info/WHEEL | 5 +
 .../Jinja2-3.0.3.dist-info/entry_points.txt | 3 +
 .../Jinja2-3.0.3.dist-info/top_level.txt | 1 +
 .../MarkupSafe-2.0.1.dist-info/INSTALLER | 1 +
 .../MarkupSafe-2.0.1.dist-info/LICENSE.rst | 28 +
 .../MarkupSafe-2.0.1.dist-info/METADATA | 100 +
 .../MarkupSafe-2.0.1.dist-info/RECORD | 14 +
 .../MarkupSafe-2.0.1.dist-info/WHEEL | 5 +
 .../MarkupSafe-2.0.1.dist-info/top_level.txt | 1 +
 .../Werkzeug-2.0.2.dist-info/INSTALLER | 1 +
 .../Werkzeug-2.0.2.dist-info/LICENSE.rst | 28 +
 .../Werkzeug-2.0.2.dist-info/METADATA | 129 +
 .../Werkzeug-2.0.2.dist-info/RECORD | 111 +
 .../Werkzeug-2.0.2.dist-info/WHEEL | 5 +
 .../Werkzeug-2.0.2.dist-info/top_level.txt | 1 +
 .../click-8.0.3.dist-info/INSTALLER | 1 +
 .../click-8.0.3.dist-info/LICENSE.rst | 28 +
 .../click-8.0.3.dist-info/METADATA | 111 +
 .../click-8.0.3.dist-info/RECORD | 41 +
 .../site-packages/click-8.0.3.dist-info/WHEEL | 5 +
 .../click-8.0.3.dist-info/top_level.txt | 1 +
 .../python3.8/site-packages/click/__init__.py | 75 +
 .../python3.8/site-packages/click/_compat.py | 627 ++
 .../site-packages/click/_termui_impl.py | 718 ++
 .../site-packages/click/_textwrap.py | 49 +
 .../site-packages/click/_unicodefun.py | 100 +
 .../site-packages/click/_winconsole.py | 279 +
 .../lib/python3.8/site-packages/click/core.py | 2953 ++++++
 .../site-packages/click/decorators.py | 436 +
 .../site-packages/click/exceptions.py | 287 +
 .../site-packages/click/formatting.py | 301 +
 .../python3.8/site-packages/click/globals.py | 69 +
 .../python3.8/site-packages/click/parser.py | 529 ++
 .../python3.8/site-packages/click/py.typed | 0
 .../site-packages/click/shell_completion.py | 581 ++
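The Procfile entry "web: gunicorn app:app" added in PATCH 1/5 tells the Heroku web dyno to start gunicorn and serve the WSGI callable named "app" from the module "app" (i.e. app.py). That app.py is not part of this patch series, so the snippet below is only a minimal sketch under the assumption that the project exposes a module-level Flask object called app; the route shown is purely illustrative.

# app.py -- hypothetical minimal layout matching the "web: gunicorn app:app" Procfile entry
from flask import Flask

# gunicorn imports the module "app" and serves this module-level "app" callable.
app = Flask(__name__)

@app.route("/")
def index():
    # Illustrative route only; the real project's views are not shown in this patch.
    return "Hello from Heroku"

if __name__ == "__main__":
    # Local debugging only; on Heroku the dyno runs gunicorn via the Procfile instead.
    app.run()

After installing the pinned dependencies (pip install -r requirements.txt), the same entry point can be exercised locally with "gunicorn app:app" before pushing to Heroku.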
.../python3.8/site-packages/click/termui.py | 809 ++ .../python3.8/site-packages/click/testing.py | 479 + .../python3.8/site-packages/click/types.py | 1052 +++ .../python3.8/site-packages/click/utils.py | 579 ++ .../python3.8/site-packages/easy_install.py | 5 + .../python3.8/site-packages/flask/__init__.py | 46 + .../python3.8/site-packages/flask/__main__.py | 3 + venv/lib/python3.8/site-packages/flask/app.py | 2091 +++++ .../site-packages/flask/blueprints.py | 609 ++ venv/lib/python3.8/site-packages/flask/cli.py | 998 ++ .../python3.8/site-packages/flask/config.py | 295 + venv/lib/python3.8/site-packages/flask/ctx.py | 480 + .../site-packages/flask/debughelpers.py | 172 + .../python3.8/site-packages/flask/globals.py | 59 + .../python3.8/site-packages/flask/helpers.py | 836 ++ .../site-packages/flask/json/__init__.py | 357 + .../python3.8/site-packages/flask/json/tag.py | 312 + .../python3.8/site-packages/flask/logging.py | 74 + .../python3.8/site-packages/flask/py.typed | 0 .../python3.8/site-packages/flask/scaffold.py | 875 ++ .../python3.8/site-packages/flask/sessions.py | 404 + .../python3.8/site-packages/flask/signals.py | 56 + .../site-packages/flask/templating.py | 165 + .../python3.8/site-packages/flask/testing.py | 280 + .../python3.8/site-packages/flask/typing.py | 56 + .../python3.8/site-packages/flask/views.py | 158 + .../python3.8/site-packages/flask/wrappers.py | 167 + .../gunicorn-20.1.0.dist-info/INSTALLER | 1 + .../gunicorn-20.1.0.dist-info/LICENSE | 23 + .../gunicorn-20.1.0.dist-info/METADATA | 120 + .../gunicorn-20.1.0.dist-info/RECORD | 76 + .../gunicorn-20.1.0.dist-info/WHEEL | 5 + .../entry_points.txt | 7 + .../gunicorn-20.1.0.dist-info/top_level.txt | 1 + .../site-packages/gunicorn/__init__.py | 9 + .../site-packages/gunicorn/__main__.py | 7 + .../site-packages/gunicorn/app/__init__.py | 4 + .../site-packages/gunicorn/app/base.py | 231 + .../site-packages/gunicorn/app/pasterapp.py | 75 + .../site-packages/gunicorn/app/wsgiapp.py | 71 + .../site-packages/gunicorn/arbiter.py | 652 ++ .../site-packages/gunicorn/config.py | 2190 +++++ .../python3.8/site-packages/gunicorn/debug.py | 69 + .../site-packages/gunicorn/errors.py | 29 + .../site-packages/gunicorn/glogging.py | 464 + .../site-packages/gunicorn/http/__init__.py | 9 + .../site-packages/gunicorn/http/body.py | 262 + .../site-packages/gunicorn/http/errors.py | 120 + .../site-packages/gunicorn/http/message.py | 356 + .../site-packages/gunicorn/http/parser.py | 52 + .../site-packages/gunicorn/http/unreader.py | 79 + .../site-packages/gunicorn/http/wsgi.py | 393 + .../gunicorn/instrument/__init__.py | 0 .../gunicorn/instrument/statsd.py | 130 + .../site-packages/gunicorn/pidfile.py | 86 + .../site-packages/gunicorn/reloader.py | 132 + .../python3.8/site-packages/gunicorn/sock.py | 212 + .../site-packages/gunicorn/systemd.py | 77 + .../python3.8/site-packages/gunicorn/util.py | 639 ++ .../gunicorn/workers/__init__.py | 15 + .../site-packages/gunicorn/workers/base.py | 273 + .../gunicorn/workers/base_async.py | 148 + .../gunicorn/workers/geventlet.py | 179 + .../site-packages/gunicorn/workers/ggevent.py | 189 + .../site-packages/gunicorn/workers/gthread.py | 362 + .../gunicorn/workers/gtornado.py | 171 + .../site-packages/gunicorn/workers/sync.py | 211 + .../gunicorn/workers/workertmp.py | 55 + .../itsdangerous-2.0.1.dist-info/INSTALLER | 1 + .../itsdangerous-2.0.1.dist-info/LICENSE.rst | 28 + .../itsdangerous-2.0.1.dist-info/METADATA | 96 + .../itsdangerous-2.0.1.dist-info/RECORD | 25 + 
.../itsdangerous-2.0.1.dist-info/WHEEL | 5 + .../top_level.txt | 1 + .../site-packages/itsdangerous/__init__.py | 22 + .../site-packages/itsdangerous/_json.py | 34 + .../site-packages/itsdangerous/encoding.py | 54 + .../site-packages/itsdangerous/exc.py | 107 + .../site-packages/itsdangerous/jws.py | 259 + .../site-packages/itsdangerous/py.typed | 0 .../site-packages/itsdangerous/serializer.py | 295 + .../site-packages/itsdangerous/signer.py | 257 + .../site-packages/itsdangerous/timed.py | 227 + .../site-packages/itsdangerous/url_safe.py | 80 + .../site-packages/jinja2/__init__.py | 45 + .../site-packages/jinja2/_identifier.py | 6 + .../site-packages/jinja2/async_utils.py | 75 + .../python3.8/site-packages/jinja2/bccache.py | 364 + .../site-packages/jinja2/compiler.py | 1957 ++++ .../site-packages/jinja2/constants.py | 20 + .../python3.8/site-packages/jinja2/debug.py | 259 + .../site-packages/jinja2/defaults.py | 48 + .../site-packages/jinja2/environment.py | 1661 ++++ .../site-packages/jinja2/exceptions.py | 166 + .../lib/python3.8/site-packages/jinja2/ext.py | 879 ++ .../python3.8/site-packages/jinja2/filters.py | 1824 ++++ .../site-packages/jinja2/idtracking.py | 318 + .../python3.8/site-packages/jinja2/lexer.py | 869 ++ .../python3.8/site-packages/jinja2/loaders.py | 652 ++ .../python3.8/site-packages/jinja2/meta.py | 111 + .../site-packages/jinja2/nativetypes.py | 124 + .../python3.8/site-packages/jinja2/nodes.py | 1204 +++ .../site-packages/jinja2/optimizer.py | 47 + .../python3.8/site-packages/jinja2/parser.py | 1040 +++ .../python3.8/site-packages/jinja2/py.typed | 0 .../python3.8/site-packages/jinja2/runtime.py | 1104 +++ .../python3.8/site-packages/jinja2/sandbox.py | 428 + .../python3.8/site-packages/jinja2/tests.py | 255 + .../python3.8/site-packages/jinja2/utils.py | 854 ++ .../python3.8/site-packages/jinja2/visitor.py | 92 + .../site-packages/markupsafe/__init__.py | 288 + .../site-packages/markupsafe/_native.py | 75 + .../site-packages/markupsafe/_speedups.c | 339 + .../markupsafe/_speedups.cpython-38-darwin.so | Bin 0 -> 35480 bytes .../site-packages/markupsafe/_speedups.pyi | 9 + .../site-packages/markupsafe/py.typed | 0 .../pip-20.1.1.dist-info/INSTALLER | 1 + .../pip-20.1.1.dist-info/LICENSE.txt | 20 + .../pip-20.1.1.dist-info/METADATA | 87 + .../site-packages/pip-20.1.1.dist-info/RECORD | 743 ++ .../site-packages/pip-20.1.1.dist-info/WHEEL | 6 + .../pip-20.1.1.dist-info/entry_points.txt | 5 + .../pip-20.1.1.dist-info/top_level.txt | 1 + .../python3.8/site-packages/pip/__init__.py | 18 + .../python3.8/site-packages/pip/__main__.py | 26 + .../site-packages/pip/_internal/__init__.py | 17 + .../site-packages/pip/_internal/build_env.py | 219 + .../site-packages/pip/_internal/cache.py | 349 + .../pip/_internal/cli/__init__.py | 4 + .../pip/_internal/cli/autocompletion.py | 164 + .../pip/_internal/cli/base_command.py | 228 + .../pip/_internal/cli/cmdoptions.py | 962 ++ .../pip/_internal/cli/command_context.py | 36 + .../site-packages/pip/_internal/cli/main.py | 75 + .../pip/_internal/cli/main_parser.py | 99 + .../site-packages/pip/_internal/cli/parser.py | 266 + .../pip/_internal/cli/progress_bars.py | 277 + .../pip/_internal/cli/req_command.py | 408 + .../pip/_internal/cli/spinners.py | 173 + .../pip/_internal/cli/status_codes.py | 8 + .../pip/_internal/commands/__init__.py | 122 + .../pip/_internal/commands/cache.py | 181 + .../pip/_internal/commands/check.py | 51 + .../pip/_internal/commands/completion.py | 95 + .../pip/_internal/commands/configuration.py | 233 + 
.../pip/_internal/commands/debug.py | 237 + .../pip/_internal/commands/download.py | 142 + .../pip/_internal/commands/freeze.py | 99 + .../pip/_internal/commands/hash.py | 58 + .../pip/_internal/commands/help.py | 41 + .../pip/_internal/commands/install.py | 691 ++ .../pip/_internal/commands/list.py | 299 + .../pip/_internal/commands/search.py | 146 + .../pip/_internal/commands/show.py | 180 + .../pip/_internal/commands/uninstall.py | 89 + .../pip/_internal/commands/wheel.py | 190 + .../pip/_internal/configuration.py | 426 + .../pip/_internal/distributions/__init__.py | 24 + .../pip/_internal/distributions/base.py | 45 + .../pip/_internal/distributions/installed.py | 24 + .../pip/_internal/distributions/sdist.py | 104 + .../pip/_internal/distributions/wheel.py | 36 + .../site-packages/pip/_internal/exceptions.py | 308 + .../pip/_internal/index/__init__.py | 2 + .../pip/_internal/index/collector.py | 661 ++ .../pip/_internal/index/package_finder.py | 1016 ++ .../site-packages/pip/_internal/locations.py | 194 + .../site-packages/pip/_internal/main.py | 16 + .../pip/_internal/models/__init__.py | 2 + .../pip/_internal/models/candidate.py | 36 + .../pip/_internal/models/direct_url.py | 245 + .../pip/_internal/models/format_control.py | 84 + .../pip/_internal/models/index.py | 31 + .../pip/_internal/models/link.py | 236 + .../pip/_internal/models/scheme.py | 25 + .../pip/_internal/models/search_scope.py | 133 + .../pip/_internal/models/selection_prefs.py | 47 + .../pip/_internal/models/target_python.py | 110 + .../pip/_internal/models/wheel.py | 78 + .../pip/_internal/network/__init__.py | 2 + .../pip/_internal/network/auth.py | 298 + .../pip/_internal/network/cache.py | 81 + .../pip/_internal/network/download.py | 200 + .../pip/_internal/network/session.py | 421 + .../pip/_internal/network/utils.py | 48 + .../pip/_internal/network/xmlrpc.py | 44 + .../pip/_internal/operations/__init__.py | 0 .../_internal/operations/build/__init__.py | 0 .../_internal/operations/build/metadata.py | 40 + .../operations/build/metadata_legacy.py | 77 + .../pip/_internal/operations/build/wheel.py | 46 + .../operations/build/wheel_legacy.py | 115 + .../pip/_internal/operations/check.py | 163 + .../pip/_internal/operations/freeze.py | 272 + .../_internal/operations/install/__init__.py | 2 + .../operations/install/editable_legacy.py | 52 + .../_internal/operations/install/legacy.py | 142 + .../pip/_internal/operations/install/wheel.py | 631 ++ .../pip/_internal/operations/prepare.py | 568 ++ .../site-packages/pip/_internal/pyproject.py | 196 + .../pip/_internal/req/__init__.py | 92 + .../pip/_internal/req/constructors.py | 464 + .../pip/_internal/req/req_file.py | 582 ++ .../pip/_internal/req/req_install.py | 850 ++ .../pip/_internal/req/req_set.py | 202 + .../pip/_internal/req/req_tracker.py | 151 + .../pip/_internal/req/req_uninstall.py | 649 ++ .../pip/_internal/resolution/__init__.py | 0 .../pip/_internal/resolution/base.py | 20 + .../_internal/resolution/legacy/__init__.py | 0 .../_internal/resolution/legacy/resolver.py | 459 + .../resolution/resolvelib/__init__.py | 0 .../_internal/resolution/resolvelib/base.py | 52 + .../resolution/resolvelib/candidates.py | 450 + .../resolution/resolvelib/factory.py | 201 + .../resolution/resolvelib/provider.py | 54 + .../resolution/resolvelib/requirements.py | 119 + .../resolution/resolvelib/resolver.py | 174 + .../pip/_internal/self_outdated_check.py | 242 + .../pip/_internal/utils/__init__.py | 0 .../pip/_internal/utils/appdirs.py | 44 + .../pip/_internal/utils/compat.py | 
270 + .../pip/_internal/utils/compatibility_tags.py | 169 + .../pip/_internal/utils/deprecation.py | 104 + .../pip/_internal/utils/direct_url_helpers.py | 130 + .../pip/_internal/utils/distutils_args.py | 48 + .../pip/_internal/utils/encoding.py | 42 + .../pip/_internal/utils/entrypoints.py | 31 + .../pip/_internal/utils/filesystem.py | 222 + .../pip/_internal/utils/filetypes.py | 16 + .../pip/_internal/utils/glibc.py | 98 + .../pip/_internal/utils/hashes.py | 133 + .../_internal/utils/inject_securetransport.py | 36 + .../pip/_internal/utils/logging.py | 399 + .../site-packages/pip/_internal/utils/misc.py | 913 ++ .../pip/_internal/utils/models.py | 42 + .../pip/_internal/utils/packaging.py | 94 + .../pip/_internal/utils/pkg_resources.py | 44 + .../pip/_internal/utils/setuptools_build.py | 181 + .../pip/_internal/utils/subprocess.py | 277 + .../pip/_internal/utils/temp_dir.py | 271 + .../pip/_internal/utils/typing.py | 38 + .../pip/_internal/utils/unpacking.py | 272 + .../site-packages/pip/_internal/utils/urls.py | 55 + .../pip/_internal/utils/virtualenv.py | 116 + .../pip/_internal/utils/wheel.py | 225 + .../pip/_internal/vcs/__init__.py | 15 + .../site-packages/pip/_internal/vcs/bazaar.py | 120 + .../site-packages/pip/_internal/vcs/git.py | 394 + .../pip/_internal/vcs/mercurial.py | 161 + .../pip/_internal/vcs/subversion.py | 334 + .../pip/_internal/vcs/versioncontrol.py | 723 ++ .../pip/_internal/wheel_builder.py | 309 + .../site-packages/pip/_vendor/__init__.py | 114 + .../site-packages/pip/_vendor/appdirs.py | 633 ++ .../pip/_vendor/cachecontrol/__init__.py | 11 + .../pip/_vendor/cachecontrol/_cmd.py | 57 + .../pip/_vendor/cachecontrol/adapter.py | 133 + .../pip/_vendor/cachecontrol/cache.py | 39 + .../_vendor/cachecontrol/caches/__init__.py | 2 + .../_vendor/cachecontrol/caches/file_cache.py | 146 + .../cachecontrol/caches/redis_cache.py | 33 + .../pip/_vendor/cachecontrol/compat.py | 29 + .../pip/_vendor/cachecontrol/controller.py | 376 + .../pip/_vendor/cachecontrol/filewrapper.py | 80 + .../pip/_vendor/cachecontrol/heuristics.py | 135 + .../pip/_vendor/cachecontrol/serialize.py | 188 + .../pip/_vendor/cachecontrol/wrapper.py | 29 + .../pip/_vendor/certifi/__init__.py | 3 + .../pip/_vendor/certifi/__main__.py | 12 + .../pip/_vendor/certifi/cacert.pem | 4641 +++++++++ .../site-packages/pip/_vendor/certifi/core.py | 30 + .../pip/_vendor/chardet/__init__.py | 39 + .../pip/_vendor/chardet/big5freq.py | 386 + .../pip/_vendor/chardet/big5prober.py | 47 + .../pip/_vendor/chardet/chardistribution.py | 233 + .../pip/_vendor/chardet/charsetgroupprober.py | 106 + .../pip/_vendor/chardet/charsetprober.py | 145 + .../pip/_vendor/chardet/cli/__init__.py | 1 + .../pip/_vendor/chardet/cli/chardetect.py | 85 + .../pip/_vendor/chardet/codingstatemachine.py | 88 + .../pip/_vendor/chardet/compat.py | 34 + .../pip/_vendor/chardet/cp949prober.py | 49 + .../pip/_vendor/chardet/enums.py | 76 + .../pip/_vendor/chardet/escprober.py | 101 + .../pip/_vendor/chardet/escsm.py | 246 + .../pip/_vendor/chardet/eucjpprober.py | 92 + .../pip/_vendor/chardet/euckrfreq.py | 195 + .../pip/_vendor/chardet/euckrprober.py | 47 + .../pip/_vendor/chardet/euctwfreq.py | 387 + .../pip/_vendor/chardet/euctwprober.py | 46 + .../pip/_vendor/chardet/gb2312freq.py | 283 + .../pip/_vendor/chardet/gb2312prober.py | 46 + .../pip/_vendor/chardet/hebrewprober.py | 292 + .../pip/_vendor/chardet/jisfreq.py | 325 + .../pip/_vendor/chardet/jpcntx.py | 233 + .../pip/_vendor/chardet/langbulgarianmodel.py | 228 + 
.../pip/_vendor/chardet/langcyrillicmodel.py | 333 + .../pip/_vendor/chardet/langgreekmodel.py | 225 + .../pip/_vendor/chardet/langhebrewmodel.py | 200 + .../pip/_vendor/chardet/langhungarianmodel.py | 225 + .../pip/_vendor/chardet/langthaimodel.py | 199 + .../pip/_vendor/chardet/langturkishmodel.py | 193 + .../pip/_vendor/chardet/latin1prober.py | 145 + .../pip/_vendor/chardet/mbcharsetprober.py | 91 + .../pip/_vendor/chardet/mbcsgroupprober.py | 54 + .../pip/_vendor/chardet/mbcssm.py | 572 ++ .../pip/_vendor/chardet/sbcharsetprober.py | 132 + .../pip/_vendor/chardet/sbcsgroupprober.py | 73 + .../pip/_vendor/chardet/sjisprober.py | 92 + .../pip/_vendor/chardet/universaldetector.py | 286 + .../pip/_vendor/chardet/utf8prober.py | 82 + .../pip/_vendor/chardet/version.py | 9 + .../pip/_vendor/colorama/__init__.py | 6 + .../pip/_vendor/colorama/ansi.py | 102 + .../pip/_vendor/colorama/ansitowin32.py | 257 + .../pip/_vendor/colorama/initialise.py | 80 + .../pip/_vendor/colorama/win32.py | 152 + .../pip/_vendor/colorama/winterm.py | 169 + .../site-packages/pip/_vendor/contextlib2.py | 518 + .../pip/_vendor/distlib/__init__.py | 23 + .../pip/_vendor/distlib/_backport/__init__.py | 6 + .../pip/_vendor/distlib/_backport/misc.py | 41 + .../pip/_vendor/distlib/_backport/shutil.py | 761 ++ .../_vendor/distlib/_backport/sysconfig.cfg | 84 + .../_vendor/distlib/_backport/sysconfig.py | 786 ++ .../pip/_vendor/distlib/_backport/tarfile.py | 2607 ++++++ .../pip/_vendor/distlib/compat.py | 1120 +++ .../pip/_vendor/distlib/database.py | 1339 +++ .../pip/_vendor/distlib/index.py | 516 + .../pip/_vendor/distlib/locators.py | 1302 +++ .../pip/_vendor/distlib/manifest.py | 393 + .../pip/_vendor/distlib/markers.py | 131 + .../pip/_vendor/distlib/metadata.py | 1096 +++ .../pip/_vendor/distlib/resources.py | 355 + .../pip/_vendor/distlib/scripts.py | 416 + .../site-packages/pip/_vendor/distlib/t32.exe | Bin 0 -> 96768 bytes .../site-packages/pip/_vendor/distlib/t64.exe | Bin 0 -> 105984 bytes .../site-packages/pip/_vendor/distlib/util.py | 1761 ++++ .../pip/_vendor/distlib/version.py | 736 ++ .../site-packages/pip/_vendor/distlib/w32.exe | Bin 0 -> 90112 bytes .../site-packages/pip/_vendor/distlib/w64.exe | Bin 0 -> 99840 bytes .../pip/_vendor/distlib/wheel.py | 1004 ++ .../site-packages/pip/_vendor/distro.py | 1230 +++ .../pip/_vendor/html5lib/__init__.py | 35 + .../pip/_vendor/html5lib/_ihatexml.py | 288 + .../pip/_vendor/html5lib/_inputstream.py | 923 ++ .../pip/_vendor/html5lib/_tokenizer.py | 1721 ++++ .../pip/_vendor/html5lib/_trie/__init__.py | 14 + .../pip/_vendor/html5lib/_trie/_base.py | 40 + .../pip/_vendor/html5lib/_trie/datrie.py | 44 + .../pip/_vendor/html5lib/_trie/py.py | 67 + .../pip/_vendor/html5lib/_utils.py | 124 + .../pip/_vendor/html5lib/constants.py | 2947 ++++++ .../pip/_vendor/html5lib/filters/__init__.py | 0 .../filters/alphabeticalattributes.py | 29 + .../pip/_vendor/html5lib/filters/base.py | 12 + .../html5lib/filters/inject_meta_charset.py | 73 + .../pip/_vendor/html5lib/filters/lint.py | 93 + .../_vendor/html5lib/filters/optionaltags.py | 207 + .../pip/_vendor/html5lib/filters/sanitizer.py | 896 ++ .../_vendor/html5lib/filters/whitespace.py | 38 + .../pip/_vendor/html5lib/html5parser.py | 2791 ++++++ .../pip/_vendor/html5lib/serializer.py | 409 + .../_vendor/html5lib/treeadapters/__init__.py | 30 + .../_vendor/html5lib/treeadapters/genshi.py | 54 + .../pip/_vendor/html5lib/treeadapters/sax.py | 50 + .../_vendor/html5lib/treebuilders/__init__.py | 88 + 
.../pip/_vendor/html5lib/treebuilders/base.py | 417 + .../pip/_vendor/html5lib/treebuilders/dom.py | 239 + .../_vendor/html5lib/treebuilders/etree.py | 340 + .../html5lib/treebuilders/etree_lxml.py | 366 + .../_vendor/html5lib/treewalkers/__init__.py | 154 + .../pip/_vendor/html5lib/treewalkers/base.py | 252 + .../pip/_vendor/html5lib/treewalkers/dom.py | 43 + .../pip/_vendor/html5lib/treewalkers/etree.py | 130 + .../html5lib/treewalkers/etree_lxml.py | 213 + .../_vendor/html5lib/treewalkers/genshi.py | 69 + .../pip/_vendor/idna/__init__.py | 2 + .../site-packages/pip/_vendor/idna/codec.py | 118 + .../site-packages/pip/_vendor/idna/compat.py | 12 + .../site-packages/pip/_vendor/idna/core.py | 398 + .../pip/_vendor/idna/idnadata.py | 1991 ++++ .../pip/_vendor/idna/intranges.py | 53 + .../pip/_vendor/idna/package_data.py | 2 + .../pip/_vendor/idna/uts46data.py | 8317 +++++++++++++++++ .../site-packages/pip/_vendor/ipaddress.py | 2420 +++++ .../pip/_vendor/msgpack/__init__.py | 54 + .../pip/_vendor/msgpack/_version.py | 1 + .../pip/_vendor/msgpack/exceptions.py | 48 + .../site-packages/pip/_vendor/msgpack/ext.py | 191 + .../pip/_vendor/msgpack/fallback.py | 1063 +++ .../pip/_vendor/packaging/__about__.py | 27 + .../pip/_vendor/packaging/__init__.py | 26 + .../pip/_vendor/packaging/_compat.py | 38 + .../pip/_vendor/packaging/_structures.py | 86 + .../pip/_vendor/packaging/_typing.py | 39 + .../pip/_vendor/packaging/markers.py | 328 + .../pip/_vendor/packaging/requirements.py | 145 + .../pip/_vendor/packaging/specifiers.py | 849 ++ .../pip/_vendor/packaging/tags.py | 739 ++ .../pip/_vendor/packaging/utils.py | 62 + .../pip/_vendor/packaging/version.py | 535 ++ .../pip/_vendor/pep517/__init__.py | 4 + .../pip/_vendor/pep517/_in_process.py | 280 + .../site-packages/pip/_vendor/pep517/build.py | 124 + .../site-packages/pip/_vendor/pep517/check.py | 203 + .../pip/_vendor/pep517/colorlog.py | 115 + .../pip/_vendor/pep517/compat.py | 34 + .../pip/_vendor/pep517/dirtools.py | 44 + .../pip/_vendor/pep517/envbuild.py | 167 + .../site-packages/pip/_vendor/pep517/meta.py | 92 + .../pip/_vendor/pep517/wrappers.py | 308 + .../pip/_vendor/pkg_resources/__init__.py | 3296 +++++++ .../pip/_vendor/pkg_resources/py31compat.py | 23 + .../pip/_vendor/progress/__init__.py | 177 + .../site-packages/pip/_vendor/progress/bar.py | 91 + .../pip/_vendor/progress/counter.py | 41 + .../pip/_vendor/progress/spinner.py | 43 + .../site-packages/pip/_vendor/pyparsing.py | 7107 ++++++++++++++ .../pip/_vendor/requests/__init__.py | 133 + .../pip/_vendor/requests/__version__.py | 14 + .../pip/_vendor/requests/_internal_utils.py | 42 + .../pip/_vendor/requests/adapters.py | 533 ++ .../site-packages/pip/_vendor/requests/api.py | 161 + .../pip/_vendor/requests/auth.py | 305 + .../pip/_vendor/requests/certs.py | 18 + .../pip/_vendor/requests/compat.py | 76 + .../pip/_vendor/requests/cookies.py | 549 ++ .../pip/_vendor/requests/exceptions.py | 126 + .../pip/_vendor/requests/help.py | 119 + .../pip/_vendor/requests/hooks.py | 34 + .../pip/_vendor/requests/models.py | 954 ++ .../pip/_vendor/requests/packages.py | 16 + .../pip/_vendor/requests/sessions.py | 767 ++ .../pip/_vendor/requests/status_codes.py | 123 + .../pip/_vendor/requests/structures.py | 105 + .../pip/_vendor/requests/utils.py | 982 ++ .../pip/_vendor/resolvelib/__init__.py | 26 + .../pip/_vendor/resolvelib/providers.py | 121 + .../pip/_vendor/resolvelib/reporters.py | 36 + .../pip/_vendor/resolvelib/resolvers.py | 414 + .../pip/_vendor/resolvelib/structs.py | 68 + 
.../site-packages/pip/_vendor/retrying.py | 267 + .../site-packages/pip/_vendor/six.py | 980 ++ .../site-packages/pip/_vendor/toml.py | 1039 ++ .../pip/_vendor/toml/__init__.py | 21 + .../site-packages/pip/_vendor/toml/decoder.py | 945 ++ .../site-packages/pip/_vendor/toml/encoder.py | 250 + .../site-packages/pip/_vendor/toml/ordered.py | 15 + .../site-packages/pip/_vendor/toml/tz.py | 21 + .../pip/_vendor/urllib3/__init__.py | 86 + .../pip/_vendor/urllib3/_collections.py | 336 + .../pip/_vendor/urllib3/connection.py | 414 + .../pip/_vendor/urllib3/connectionpool.py | 1051 +++ .../pip/_vendor/urllib3/contrib/__init__.py | 0 .../urllib3/contrib/_appengine_environ.py | 36 + .../contrib/_securetransport/__init__.py | 0 .../contrib/_securetransport/bindings.py | 493 + .../contrib/_securetransport/low_level.py | 328 + .../pip/_vendor/urllib3/contrib/appengine.py | 314 + .../pip/_vendor/urllib3/contrib/ntlmpool.py | 121 + .../pip/_vendor/urllib3/contrib/pyopenssl.py | 498 + .../urllib3/contrib/securetransport.py | 859 ++ .../pip/_vendor/urllib3/contrib/socks.py | 210 + .../pip/_vendor/urllib3/exceptions.py | 255 + .../pip/_vendor/urllib3/fields.py | 273 + .../pip/_vendor/urllib3/filepost.py | 98 + .../pip/_vendor/urllib3/packages/__init__.py | 5 + .../urllib3/packages/backports/__init__.py | 0 .../urllib3/packages/backports/makefile.py | 52 + .../pip/_vendor/urllib3/packages/six.py | 1021 ++ .../packages/ssl_match_hostname/__init__.py | 19 + .../ssl_match_hostname/_implementation.py | 160 + .../pip/_vendor/urllib3/poolmanager.py | 470 + .../pip/_vendor/urllib3/request.py | 171 + .../pip/_vendor/urllib3/response.py | 809 ++ .../pip/_vendor/urllib3/util/__init__.py | 46 + .../pip/_vendor/urllib3/util/connection.py | 138 + .../pip/_vendor/urllib3/util/queue.py | 21 + .../pip/_vendor/urllib3/util/request.py | 135 + .../pip/_vendor/urllib3/util/response.py | 86 + .../pip/_vendor/urllib3/util/retry.py | 450 + .../pip/_vendor/urllib3/util/ssl_.py | 407 + .../pip/_vendor/urllib3/util/timeout.py | 258 + .../pip/_vendor/urllib3/util/url.py | 430 + .../pip/_vendor/urllib3/util/wait.py | 153 + .../site-packages/pip/_vendor/vendor.txt | 24 + .../pip/_vendor/webencodings/__init__.py | 342 + .../pip/_vendor/webencodings/labels.py | 231 + .../pip/_vendor/webencodings/mklabels.py | 59 + .../pip/_vendor/webencodings/tests.py | 153 + .../_vendor/webencodings/x_user_defined.py | 325 + .../site-packages/pkg_resources/__init__.py | 3307 +++++++ .../pkg_resources/_vendor/__init__.py | 0 .../pkg_resources/_vendor/appdirs.py | 608 ++ .../_vendor/packaging/__about__.py | 21 + .../_vendor/packaging/__init__.py | 14 + .../_vendor/packaging/_compat.py | 30 + .../_vendor/packaging/_structures.py | 68 + .../_vendor/packaging/markers.py | 301 + .../_vendor/packaging/requirements.py | 127 + .../_vendor/packaging/specifiers.py | 774 ++ .../pkg_resources/_vendor/packaging/utils.py | 14 + .../_vendor/packaging/version.py | 393 + .../pkg_resources/_vendor/pyparsing.py | 5742 ++++++++++++ .../pkg_resources/_vendor/six.py | 868 ++ .../pkg_resources/extern/__init__.py | 66 + .../site-packages/pkg_resources/py2_warn.py | 16 + .../site-packages/pkg_resources/py31compat.py | 23 + .../setuptools-47.1.0.dist-info/INSTALLER | 1 + .../setuptools-47.1.0.dist-info/LICENSE | 19 + .../setuptools-47.1.0.dist-info/METADATA | 109 + .../setuptools-47.1.0.dist-info/RECORD | 196 + .../setuptools-47.1.0.dist-info/WHEEL | 5 + .../dependency_links.txt | 2 + .../entry_points.txt | 68 + .../setuptools-47.1.0.dist-info/top_level.txt | 3 + 
.../setuptools-47.1.0.dist-info/zip-safe | 1 + .../site-packages/setuptools/__init__.py | 232 + .../setuptools/_deprecation_warning.py | 7 + .../site-packages/setuptools/_imp.py | 82 + .../setuptools/_vendor/__init__.py | 0 .../setuptools/_vendor/ordered_set.py | 488 + .../setuptools/_vendor/packaging/__about__.py | 27 + .../setuptools/_vendor/packaging/__init__.py | 26 + .../setuptools/_vendor/packaging/_compat.py | 31 + .../_vendor/packaging/_structures.py | 68 + .../setuptools/_vendor/packaging/markers.py | 296 + .../_vendor/packaging/requirements.py | 138 + .../_vendor/packaging/specifiers.py | 749 ++ .../setuptools/_vendor/packaging/tags.py | 404 + .../setuptools/_vendor/packaging/utils.py | 57 + .../setuptools/_vendor/packaging/version.py | 420 + .../setuptools/_vendor/pyparsing.py | 5742 ++++++++++++ .../site-packages/setuptools/_vendor/six.py | 868 ++ .../site-packages/setuptools/archive_util.py | 175 + .../site-packages/setuptools/build_meta.py | 272 + .../site-packages/setuptools/cli-32.exe | Bin 0 -> 65536 bytes .../site-packages/setuptools/cli-64.exe | Bin 0 -> 74752 bytes .../site-packages/setuptools/cli.exe | Bin 0 -> 65536 bytes .../setuptools/command/__init__.py | 17 + .../site-packages/setuptools/command/alias.py | 80 + .../setuptools/command/bdist_egg.py | 509 + .../setuptools/command/bdist_rpm.py | 43 + .../setuptools/command/bdist_wininst.py | 30 + .../setuptools/command/build_clib.py | 101 + .../setuptools/command/build_ext.py | 330 + .../setuptools/command/build_py.py | 276 + .../setuptools/command/develop.py | 221 + .../setuptools/command/dist_info.py | 36 + .../setuptools/command/easy_install.py | 2354 +++++ .../setuptools/command/egg_info.py | 721 ++ .../setuptools/command/install.py | 125 + .../setuptools/command/install_egg_info.py | 62 + .../setuptools/command/install_lib.py | 122 + .../setuptools/command/install_scripts.py | 68 + .../setuptools/command/launcher manifest.xml | 15 + .../setuptools/command/py36compat.py | 136 + .../setuptools/command/register.py | 18 + .../setuptools/command/rotate.py | 66 + .../setuptools/command/saveopts.py | 22 + .../site-packages/setuptools/command/sdist.py | 252 + .../setuptools/command/setopt.py | 149 + .../site-packages/setuptools/command/test.py | 280 + .../setuptools/command/upload.py | 17 + .../setuptools/command/upload_docs.py | 206 + .../site-packages/setuptools/config.py | 700 ++ .../site-packages/setuptools/dep_util.py | 25 + .../site-packages/setuptools/depends.py | 176 + .../site-packages/setuptools/dist.py | 1031 ++ .../site-packages/setuptools/errors.py | 16 + .../site-packages/setuptools/extension.py | 57 + .../setuptools/extern/__init__.py | 66 + .../site-packages/setuptools/glob.py | 174 + .../site-packages/setuptools/gui-32.exe | Bin 0 -> 65536 bytes .../site-packages/setuptools/gui-64.exe | Bin 0 -> 75264 bytes .../site-packages/setuptools/gui.exe | Bin 0 -> 65536 bytes .../site-packages/setuptools/installer.py | 150 + .../site-packages/setuptools/launch.py | 35 + .../site-packages/setuptools/lib2to3_ex.py | 71 + .../site-packages/setuptools/monkey.py | 179 + .../site-packages/setuptools/msvc.py | 1825 ++++ .../site-packages/setuptools/namespaces.py | 111 + .../site-packages/setuptools/package_index.py | 1140 +++ .../site-packages/setuptools/py27compat.py | 60 + .../site-packages/setuptools/py31compat.py | 32 + .../site-packages/setuptools/py33compat.py | 59 + .../site-packages/setuptools/py34compat.py | 13 + .../site-packages/setuptools/sandbox.py | 492 + .../setuptools/script (dev).tmpl | 6 + 
.../site-packages/setuptools/script.tmpl | 3 + .../site-packages/setuptools/site-patch.py | 76 + .../site-packages/setuptools/ssl_support.py | 265 + .../site-packages/setuptools/unicode_utils.py | 44 + .../site-packages/setuptools/version.py | 6 + .../site-packages/setuptools/wheel.py | 217 + .../setuptools/windows_support.py | 29 + .../site-packages/werkzeug/__init__.py | 6 + .../site-packages/werkzeug/_internal.py | 626 ++ .../site-packages/werkzeug/_reloader.py | 430 + .../site-packages/werkzeug/datastructures.py | 3059 ++++++ .../site-packages/werkzeug/datastructures.pyi | 912 ++ .../site-packages/werkzeug/debug/__init__.py | 502 + .../site-packages/werkzeug/debug/console.py | 211 + .../site-packages/werkzeug/debug/repr.py | 284 + .../werkzeug/debug/shared/FONT_LICENSE | 96 + .../werkzeug/debug/shared/ICON_LICENSE.md | 6 + .../werkzeug/debug/shared/console.png | Bin 0 -> 507 bytes .../werkzeug/debug/shared/debugger.js | 359 + .../werkzeug/debug/shared/less.png | Bin 0 -> 191 bytes .../werkzeug/debug/shared/more.png | Bin 0 -> 200 bytes .../werkzeug/debug/shared/source.png | Bin 0 -> 818 bytes .../werkzeug/debug/shared/style.css | 163 + .../werkzeug/debug/shared/ubuntu.ttf | Bin 0 -> 70220 bytes .../site-packages/werkzeug/debug/tbtools.py | 600 ++ .../site-packages/werkzeug/exceptions.py | 943 ++ .../site-packages/werkzeug/filesystem.py | 55 + .../site-packages/werkzeug/formparser.py | 495 + .../python3.8/site-packages/werkzeug/http.py | 1388 +++ .../python3.8/site-packages/werkzeug/local.py | 677 ++ .../werkzeug/middleware/__init__.py | 22 + .../werkzeug/middleware/dispatcher.py | 78 + .../werkzeug/middleware/http_proxy.py | 230 + .../site-packages/werkzeug/middleware/lint.py | 420 + .../werkzeug/middleware/profiler.py | 139 + .../werkzeug/middleware/proxy_fix.py | 187 + .../werkzeug/middleware/shared_data.py | 320 + .../python3.8/site-packages/werkzeug/py.typed | 0 .../site-packages/werkzeug/routing.py | 2341 +++++ .../site-packages/werkzeug/sansio/__init__.py | 0 .../werkzeug/sansio/multipart.py | 260 + .../site-packages/werkzeug/sansio/request.py | 548 ++ .../site-packages/werkzeug/sansio/response.py | 704 ++ .../site-packages/werkzeug/sansio/utils.py | 142 + .../site-packages/werkzeug/security.py | 247 + .../site-packages/werkzeug/serving.py | 1081 +++ .../python3.8/site-packages/werkzeug/test.py | 1326 +++ .../site-packages/werkzeug/testapp.py | 240 + .../python3.8/site-packages/werkzeug/urls.py | 1211 +++ .../site-packages/werkzeug/user_agent.py | 47 + .../site-packages/werkzeug/useragents.py | 215 + .../python3.8/site-packages/werkzeug/utils.py | 1099 +++ .../werkzeug/wrappers/__init__.py | 16 + .../site-packages/werkzeug/wrappers/accept.py | 14 + .../site-packages/werkzeug/wrappers/auth.py | 26 + .../werkzeug/wrappers/base_request.py | 36 + .../werkzeug/wrappers/base_response.py | 36 + .../werkzeug/wrappers/common_descriptors.py | 26 + .../site-packages/werkzeug/wrappers/cors.py | 26 + .../site-packages/werkzeug/wrappers/etag.py | 26 + .../site-packages/werkzeug/wrappers/json.py | 13 + .../werkzeug/wrappers/request.py | 660 ++ .../werkzeug/wrappers/response.py | 890 ++ .../werkzeug/wrappers/user_agent.py | 14 + .../python3.8/site-packages/werkzeug/wsgi.py | 982 ++ venv/pyvenv.cfg | 3 + 717 files changed, 224643 insertions(+), 1 deletion(-) create mode 100644 venv/bin/Activate.ps1 create mode 100644 venv/bin/activate create mode 100644 venv/bin/activate.csh create mode 100644 venv/bin/activate.fish create mode 100755 venv/bin/easy_install create mode 100755 
venv/bin/easy_install-3.8 create mode 100755 venv/bin/flask create mode 100755 venv/bin/gunicorn create mode 100755 venv/bin/pip create mode 100755 venv/bin/pip3 create mode 100755 venv/bin/pip3.8 create mode 120000 venv/bin/python create mode 120000 venv/bin/python3 create mode 100644 venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/INSTALLER create mode 100644 venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/LICENSE.rst create mode 100644 venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/METADATA create mode 100644 venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/RECORD create mode 100644 venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/WHEEL create mode 100644 venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/entry_points.txt create mode 100644 venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/top_level.txt create mode 100644 venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/INSTALLER create mode 100644 venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/LICENSE.rst create mode 100644 venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/METADATA create mode 100644 venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/RECORD create mode 100644 venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/WHEEL create mode 100644 venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/entry_points.txt create mode 100644 venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/top_level.txt create mode 100644 venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/INSTALLER create mode 100644 venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/LICENSE.rst create mode 100644 venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/METADATA create mode 100644 venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/RECORD create mode 100644 venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/WHEEL create mode 100644 venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/top_level.txt create mode 100644 venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/INSTALLER create mode 100644 venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/LICENSE.rst create mode 100644 venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/METADATA create mode 100644 venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/RECORD create mode 100644 venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/WHEEL create mode 100644 venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/top_level.txt create mode 100644 venv/lib/python3.8/site-packages/click-8.0.3.dist-info/INSTALLER create mode 100644 venv/lib/python3.8/site-packages/click-8.0.3.dist-info/LICENSE.rst create mode 100644 venv/lib/python3.8/site-packages/click-8.0.3.dist-info/METADATA create mode 100644 venv/lib/python3.8/site-packages/click-8.0.3.dist-info/RECORD create mode 100644 venv/lib/python3.8/site-packages/click-8.0.3.dist-info/WHEEL create mode 100644 venv/lib/python3.8/site-packages/click-8.0.3.dist-info/top_level.txt create mode 100644 venv/lib/python3.8/site-packages/click/__init__.py create mode 100644 venv/lib/python3.8/site-packages/click/_compat.py create mode 100644 venv/lib/python3.8/site-packages/click/_termui_impl.py create mode 100644 venv/lib/python3.8/site-packages/click/_textwrap.py create mode 100644 venv/lib/python3.8/site-packages/click/_unicodefun.py create mode 100644 venv/lib/python3.8/site-packages/click/_winconsole.py create mode 100644 venv/lib/python3.8/site-packages/click/core.py create mode 100644 
venv/lib/python3.8/site-packages/click/decorators.py create mode 100644 venv/lib/python3.8/site-packages/click/exceptions.py create mode 100644 venv/lib/python3.8/site-packages/click/formatting.py create mode 100644 venv/lib/python3.8/site-packages/click/globals.py create mode 100644 venv/lib/python3.8/site-packages/click/parser.py create mode 100644 venv/lib/python3.8/site-packages/click/py.typed create mode 100644 venv/lib/python3.8/site-packages/click/shell_completion.py create mode 100644 venv/lib/python3.8/site-packages/click/termui.py create mode 100644 venv/lib/python3.8/site-packages/click/testing.py create mode 100644 venv/lib/python3.8/site-packages/click/types.py create mode 100644 venv/lib/python3.8/site-packages/click/utils.py create mode 100644 venv/lib/python3.8/site-packages/easy_install.py create mode 100644 venv/lib/python3.8/site-packages/flask/__init__.py create mode 100644 venv/lib/python3.8/site-packages/flask/__main__.py create mode 100644 venv/lib/python3.8/site-packages/flask/app.py create mode 100644 venv/lib/python3.8/site-packages/flask/blueprints.py create mode 100644 venv/lib/python3.8/site-packages/flask/cli.py create mode 100644 venv/lib/python3.8/site-packages/flask/config.py create mode 100644 venv/lib/python3.8/site-packages/flask/ctx.py create mode 100644 venv/lib/python3.8/site-packages/flask/debughelpers.py create mode 100644 venv/lib/python3.8/site-packages/flask/globals.py create mode 100644 venv/lib/python3.8/site-packages/flask/helpers.py create mode 100644 venv/lib/python3.8/site-packages/flask/json/__init__.py create mode 100644 venv/lib/python3.8/site-packages/flask/json/tag.py create mode 100644 venv/lib/python3.8/site-packages/flask/logging.py create mode 100644 venv/lib/python3.8/site-packages/flask/py.typed create mode 100644 venv/lib/python3.8/site-packages/flask/scaffold.py create mode 100644 venv/lib/python3.8/site-packages/flask/sessions.py create mode 100644 venv/lib/python3.8/site-packages/flask/signals.py create mode 100644 venv/lib/python3.8/site-packages/flask/templating.py create mode 100644 venv/lib/python3.8/site-packages/flask/testing.py create mode 100644 venv/lib/python3.8/site-packages/flask/typing.py create mode 100644 venv/lib/python3.8/site-packages/flask/views.py create mode 100644 venv/lib/python3.8/site-packages/flask/wrappers.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn-20.1.0.dist-info/INSTALLER create mode 100644 venv/lib/python3.8/site-packages/gunicorn-20.1.0.dist-info/LICENSE create mode 100644 venv/lib/python3.8/site-packages/gunicorn-20.1.0.dist-info/METADATA create mode 100644 venv/lib/python3.8/site-packages/gunicorn-20.1.0.dist-info/RECORD create mode 100644 venv/lib/python3.8/site-packages/gunicorn-20.1.0.dist-info/WHEEL create mode 100644 venv/lib/python3.8/site-packages/gunicorn-20.1.0.dist-info/entry_points.txt create mode 100644 venv/lib/python3.8/site-packages/gunicorn-20.1.0.dist-info/top_level.txt create mode 100644 venv/lib/python3.8/site-packages/gunicorn/__init__.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/__main__.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/app/__init__.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/app/base.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/app/pasterapp.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/app/wsgiapp.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/arbiter.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/config.py 
create mode 100644 venv/lib/python3.8/site-packages/gunicorn/debug.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/errors.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/glogging.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/http/__init__.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/http/body.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/http/errors.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/http/message.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/http/parser.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/http/unreader.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/http/wsgi.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/instrument/__init__.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/instrument/statsd.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/pidfile.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/reloader.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/sock.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/systemd.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/util.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/__init__.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/base.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/base_async.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/geventlet.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/ggevent.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/gthread.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/gtornado.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/sync.py create mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/workertmp.py create mode 100644 venv/lib/python3.8/site-packages/itsdangerous-2.0.1.dist-info/INSTALLER create mode 100644 venv/lib/python3.8/site-packages/itsdangerous-2.0.1.dist-info/LICENSE.rst create mode 100644 venv/lib/python3.8/site-packages/itsdangerous-2.0.1.dist-info/METADATA create mode 100644 venv/lib/python3.8/site-packages/itsdangerous-2.0.1.dist-info/RECORD create mode 100644 venv/lib/python3.8/site-packages/itsdangerous-2.0.1.dist-info/WHEEL create mode 100644 venv/lib/python3.8/site-packages/itsdangerous-2.0.1.dist-info/top_level.txt create mode 100644 venv/lib/python3.8/site-packages/itsdangerous/__init__.py create mode 100644 venv/lib/python3.8/site-packages/itsdangerous/_json.py create mode 100644 venv/lib/python3.8/site-packages/itsdangerous/encoding.py create mode 100644 venv/lib/python3.8/site-packages/itsdangerous/exc.py create mode 100644 venv/lib/python3.8/site-packages/itsdangerous/jws.py create mode 100644 venv/lib/python3.8/site-packages/itsdangerous/py.typed create mode 100644 venv/lib/python3.8/site-packages/itsdangerous/serializer.py create mode 100644 venv/lib/python3.8/site-packages/itsdangerous/signer.py create mode 100644 venv/lib/python3.8/site-packages/itsdangerous/timed.py create mode 100644 venv/lib/python3.8/site-packages/itsdangerous/url_safe.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/__init__.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/_identifier.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/async_utils.py create mode 100644 
venv/lib/python3.8/site-packages/jinja2/bccache.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/compiler.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/constants.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/debug.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/defaults.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/environment.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/exceptions.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/ext.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/filters.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/idtracking.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/lexer.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/loaders.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/meta.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/nativetypes.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/nodes.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/optimizer.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/parser.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/py.typed create mode 100644 venv/lib/python3.8/site-packages/jinja2/runtime.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/sandbox.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/tests.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/utils.py create mode 100644 venv/lib/python3.8/site-packages/jinja2/visitor.py create mode 100644 venv/lib/python3.8/site-packages/markupsafe/__init__.py create mode 100644 venv/lib/python3.8/site-packages/markupsafe/_native.py create mode 100644 venv/lib/python3.8/site-packages/markupsafe/_speedups.c create mode 100755 venv/lib/python3.8/site-packages/markupsafe/_speedups.cpython-38-darwin.so create mode 100644 venv/lib/python3.8/site-packages/markupsafe/_speedups.pyi create mode 100644 venv/lib/python3.8/site-packages/markupsafe/py.typed create mode 100644 venv/lib/python3.8/site-packages/pip-20.1.1.dist-info/INSTALLER create mode 100644 venv/lib/python3.8/site-packages/pip-20.1.1.dist-info/LICENSE.txt create mode 100644 venv/lib/python3.8/site-packages/pip-20.1.1.dist-info/METADATA create mode 100644 venv/lib/python3.8/site-packages/pip-20.1.1.dist-info/RECORD create mode 100644 venv/lib/python3.8/site-packages/pip-20.1.1.dist-info/WHEEL create mode 100644 venv/lib/python3.8/site-packages/pip-20.1.1.dist-info/entry_points.txt create mode 100644 venv/lib/python3.8/site-packages/pip-20.1.1.dist-info/top_level.txt create mode 100644 venv/lib/python3.8/site-packages/pip/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/__main__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/build_env.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cache.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/autocompletion.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/base_command.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/cmdoptions.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/command_context.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/main.py create mode 100644 
venv/lib/python3.8/site-packages/pip/_internal/cli/main_parser.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/parser.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/progress_bars.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/req_command.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/spinners.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/status_codes.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/cache.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/check.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/completion.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/configuration.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/debug.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/download.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/freeze.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/hash.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/help.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/install.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/list.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/search.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/show.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/uninstall.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/wheel.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/configuration.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/distributions/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/distributions/base.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/distributions/installed.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/distributions/sdist.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/distributions/wheel.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/exceptions.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/index/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/index/collector.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/index/package_finder.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/locations.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/main.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/candidate.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/direct_url.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/format_control.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/index.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/link.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/scheme.py create mode 100644 
venv/lib/python3.8/site-packages/pip/_internal/models/search_scope.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/selection_prefs.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/target_python.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/wheel.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/network/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/network/auth.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/network/cache.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/network/download.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/network/session.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/network/utils.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/network/xmlrpc.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/build/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/build/metadata.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/build/metadata_legacy.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/build/wheel.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/build/wheel_legacy.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/check.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/freeze.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/install/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/install/editable_legacy.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/install/legacy.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/install/wheel.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/prepare.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/pyproject.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/req/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/req/constructors.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/req/req_file.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/req/req_install.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/req/req_set.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/req/req_tracker.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/req/req_uninstall.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/base.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/legacy/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/legacy/resolver.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/base.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/candidates.py create mode 100644 
venv/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/factory.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/provider.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/requirements.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/resolver.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/self_outdated_check.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/appdirs.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/compat.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/compatibility_tags.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/deprecation.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/direct_url_helpers.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/distutils_args.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/encoding.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/entrypoints.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/filesystem.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/filetypes.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/glibc.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/hashes.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/inject_securetransport.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/logging.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/misc.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/models.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/packaging.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/pkg_resources.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/setuptools_build.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/subprocess.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/temp_dir.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/typing.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/unpacking.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/urls.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/virtualenv.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/wheel.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/vcs/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/vcs/bazaar.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/vcs/git.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/vcs/mercurial.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/vcs/subversion.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/vcs/versioncontrol.py create mode 100644 venv/lib/python3.8/site-packages/pip/_internal/wheel_builder.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/appdirs.py create mode 100644 
venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/_cmd.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/adapter.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/cache.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/caches/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/compat.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/controller.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/filewrapper.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/heuristics.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/serialize.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/wrapper.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/certifi/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/certifi/__main__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/certifi/cacert.pem create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/certifi/core.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/big5freq.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/big5prober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/chardistribution.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/charsetgroupprober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/charsetprober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/cli/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/cli/chardetect.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/codingstatemachine.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/compat.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/cp949prober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/enums.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/escprober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/escsm.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/eucjpprober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/euckrfreq.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/euckrprober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/euctwfreq.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/euctwprober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/gb2312freq.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/gb2312prober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/hebrewprober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/jisfreq.py create mode 100644 
venv/lib/python3.8/site-packages/pip/_vendor/chardet/jpcntx.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/langbulgarianmodel.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/langcyrillicmodel.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/langgreekmodel.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/langhebrewmodel.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/langhungarianmodel.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/langthaimodel.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/langturkishmodel.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/latin1prober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/mbcharsetprober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/mbcsgroupprober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/mbcssm.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/sbcharsetprober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/sbcsgroupprober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/sjisprober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/universaldetector.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/utf8prober.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/version.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/colorama/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/colorama/ansi.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/colorama/ansitowin32.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/colorama/initialise.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/colorama/win32.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/colorama/winterm.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/contextlib2.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/misc.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/shutil.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/sysconfig.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/tarfile.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/compat.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/database.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/index.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/locators.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/manifest.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/markers.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/metadata.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/resources.py create mode 100644 
venv/lib/python3.8/site-packages/pip/_vendor/distlib/scripts.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/t32.exe create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/t64.exe create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/util.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/version.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/w32.exe create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/w64.exe create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/wheel.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distro.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_ihatexml.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_inputstream.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_tokenizer.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/_base.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/datrie.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/py.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_utils.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/constants.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/base.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/lint.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/optionaltags.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/sanitizer.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/whitespace.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/html5parser.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/serializer.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treeadapters/sax.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treebuilders/base.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treebuilders/dom.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treebuilders/etree.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treewalkers/base.py create mode 100644 
venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treewalkers/dom.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treewalkers/etree.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/codec.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/compat.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/core.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/idnadata.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/intranges.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/package_data.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/uts46data.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/ipaddress.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/msgpack/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/msgpack/_version.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/msgpack/exceptions.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/msgpack/ext.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/msgpack/fallback.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/__about__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/_compat.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/_structures.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/_typing.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/markers.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/requirements.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/specifiers.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/tags.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/utils.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/version.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/_in_process.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/build.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/check.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/colorlog.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/compat.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/dirtools.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/envbuild.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/meta.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/wrappers.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pkg_resources/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pkg_resources/py31compat.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/progress/__init__.py create mode 100644 
venv/lib/python3.8/site-packages/pip/_vendor/progress/bar.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/progress/counter.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/progress/spinner.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pyparsing.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/__version__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/_internal_utils.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/adapters.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/api.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/auth.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/certs.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/compat.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/cookies.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/exceptions.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/help.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/hooks.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/models.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/packages.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/sessions.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/status_codes.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/structures.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/utils.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/resolvelib/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/resolvelib/providers.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/resolvelib/reporters.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/resolvelib/resolvers.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/resolvelib/structs.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/retrying.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/six.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/toml.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/toml/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/toml/decoder.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/toml/encoder.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/toml/ordered.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/toml/tz.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/_collections.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/connection.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/connectionpool.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py create mode 100644 
venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/appengine.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/securetransport.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/socks.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/exceptions.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/fields.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/filepost.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/packages/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/packages/six.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/poolmanager.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/request.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/response.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/connection.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/queue.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/request.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/response.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/retry.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/ssl_.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/timeout.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/url.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/wait.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/vendor.txt create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/webencodings/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/webencodings/labels.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/webencodings/mklabels.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/webencodings/tests.py create mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/webencodings/x_user_defined.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/appdirs.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__about__.py create mode 100644 
venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_compat.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_structures.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/markers.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/requirements.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/specifiers.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/utils.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/version.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/pyparsing.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/six.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/extern/__init__.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/py2_warn.py create mode 100644 venv/lib/python3.8/site-packages/pkg_resources/py31compat.py create mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/INSTALLER create mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/LICENSE create mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/METADATA create mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/RECORD create mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/WHEEL create mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/dependency_links.txt create mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/entry_points.txt create mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/top_level.txt create mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/zip-safe create mode 100644 venv/lib/python3.8/site-packages/setuptools/__init__.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_deprecation_warning.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_imp.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/__init__.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/ordered_set.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/__about__.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/__init__.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/_compat.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/_structures.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/markers.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/requirements.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/specifiers.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/tags.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/utils.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/version.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/pyparsing.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/six.py create mode 100644 
venv/lib/python3.8/site-packages/setuptools/archive_util.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/build_meta.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/cli-32.exe create mode 100644 venv/lib/python3.8/site-packages/setuptools/cli-64.exe create mode 100644 venv/lib/python3.8/site-packages/setuptools/cli.exe create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/__init__.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/alias.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/bdist_egg.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/bdist_rpm.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/bdist_wininst.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/build_clib.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/build_ext.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/build_py.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/develop.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/dist_info.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/easy_install.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/egg_info.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/install.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/install_egg_info.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/install_lib.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/install_scripts.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/launcher manifest.xml create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/py36compat.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/register.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/rotate.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/saveopts.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/sdist.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/setopt.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/test.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/upload.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/command/upload_docs.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/config.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/dep_util.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/depends.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/dist.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/errors.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/extension.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/extern/__init__.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/glob.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/gui-32.exe create mode 100644 venv/lib/python3.8/site-packages/setuptools/gui-64.exe create mode 100644 venv/lib/python3.8/site-packages/setuptools/gui.exe create mode 100644 venv/lib/python3.8/site-packages/setuptools/installer.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/launch.py create mode 100644 
venv/lib/python3.8/site-packages/setuptools/lib2to3_ex.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/monkey.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/msvc.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/namespaces.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/package_index.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/py27compat.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/py31compat.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/py33compat.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/py34compat.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/sandbox.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/script (dev).tmpl create mode 100644 venv/lib/python3.8/site-packages/setuptools/script.tmpl create mode 100644 venv/lib/python3.8/site-packages/setuptools/site-patch.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/ssl_support.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/unicode_utils.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/version.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/wheel.py create mode 100644 venv/lib/python3.8/site-packages/setuptools/windows_support.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/__init__.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/_internal.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/_reloader.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/datastructures.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/datastructures.pyi create mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/__init__.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/console.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/repr.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/FONT_LICENSE create mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/ICON_LICENSE.md create mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/console.png create mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/debugger.js create mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/less.png create mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/more.png create mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/source.png create mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/style.css create mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/ubuntu.ttf create mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/tbtools.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/exceptions.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/filesystem.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/formparser.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/http.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/local.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/middleware/__init__.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/middleware/dispatcher.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/middleware/http_proxy.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/middleware/lint.py create mode 100644 
venv/lib/python3.8/site-packages/werkzeug/middleware/profiler.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/middleware/proxy_fix.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/middleware/shared_data.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/py.typed create mode 100644 venv/lib/python3.8/site-packages/werkzeug/routing.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/sansio/__init__.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/sansio/multipart.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/sansio/request.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/sansio/response.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/sansio/utils.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/security.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/serving.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/test.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/testapp.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/urls.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/user_agent.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/useragents.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/utils.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/__init__.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/accept.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/auth.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/base_request.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/base_response.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/common_descriptors.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/cors.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/etag.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/json.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/request.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/response.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/user_agent.py create mode 100644 venv/lib/python3.8/site-packages/werkzeug/wsgi.py create mode 100644 venv/pyvenv.cfg diff --git a/Procfile b/Procfile index ca6e941..d06acd1 100644 --- a/Procfile +++ b/Procfile @@ -1 +1 @@ -web: gunicorn app:app +web: gunicorn wsgi:app diff --git a/venv/bin/Activate.ps1 b/venv/bin/Activate.ps1 new file mode 100644 index 0000000..2fb3852 --- /dev/null +++ b/venv/bin/Activate.ps1 @@ -0,0 +1,241 @@ +<# +.Synopsis +Activate a Python virtual environment for the current PowerShell session. + +.Description +Pushes the python executable for a virtual environment to the front of the +$Env:PATH environment variable and sets the prompt to signify that you are +in a Python virtual environment. Makes use of the command line switches as +well as the `pyvenv.cfg` file values present in the virtual environment. + +.Parameter VenvDir +Path to the directory that contains the virtual environment to activate. The +default value for this is the parent of the directory that the Activate.ps1 +script is located within. + +.Parameter Prompt +The prompt prefix to display when this virtual environment is activated. 
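[Note on the Procfile hunk above] The web process changes from `web: gunicorn app:app` to `web: gunicorn wsgi:app`, i.e. gunicorn now imports the WSGI callable `app` from a module named `wsgi` rather than from `app.py` directly. That `wsgi.py` module is not shown in this hunk; a minimal sketch of what it would typically contain, assuming the Flask application object is still created as `app` in `app.py`, is:

    # wsgi.py (hypothetical) -- entry point matching "web: gunicorn wsgi:app"
    # Assumes app.py defines the Flask application object as `app`.
    from app import app

    if __name__ == "__main__":
        # Only used when run directly; on Heroku, gunicorn serves the app
        # via the Procfile command above.
        app.run()

With such a module in place, `gunicorn wsgi:app` (the command the Procfile runs on the dyno) imports wsgi.py and serves the `app` callable.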
By +default, this prompt is the name of the virtual environment folder (VenvDir) +surrounded by parentheses and followed by a single space (ie. '(.venv) '). + +.Example +Activate.ps1 +Activates the Python virtual environment that contains the Activate.ps1 script. + +.Example +Activate.ps1 -Verbose +Activates the Python virtual environment that contains the Activate.ps1 script, +and shows extra information about the activation as it executes. + +.Example +Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv +Activates the Python virtual environment located in the specified location. + +.Example +Activate.ps1 -Prompt "MyPython" +Activates the Python virtual environment that contains the Activate.ps1 script, +and prefixes the current prompt with the specified string (surrounded in +parentheses) while the virtual environment is active. + +.Notes +On Windows, it may be required to enable this Activate.ps1 script by setting the +execution policy for the user. You can do this by issuing the following PowerShell +command: + +PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser + +For more information on Execution Policies: +https://go.microsoft.com/fwlink/?LinkID=135170 + +#> +Param( + [Parameter(Mandatory = $false)] + [String] + $VenvDir, + [Parameter(Mandatory = $false)] + [String] + $Prompt +) + +<# Function declarations --------------------------------------------------- #> + +<# +.Synopsis +Remove all shell session elements added by the Activate script, including the +addition of the virtual environment's Python executable from the beginning of +the PATH variable. + +.Parameter NonDestructive +If present, do not remove this function from the global namespace for the +session. + +#> +function global:deactivate ([switch]$NonDestructive) { + # Revert to original values + + # The prior prompt: + if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { + Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt + Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT + } + + # The prior PYTHONHOME: + if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { + Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME + Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME + } + + # The prior PATH: + if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { + Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH + Remove-Item -Path Env:_OLD_VIRTUAL_PATH + } + + # Just remove the VIRTUAL_ENV altogether: + if (Test-Path -Path Env:VIRTUAL_ENV) { + Remove-Item -Path env:VIRTUAL_ENV + } + + # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: + if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { + Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force + } + + # Leave deactivate function in the global namespace if requested: + if (-not $NonDestructive) { + Remove-Item -Path function:deactivate + } +} + +<# +.Description +Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the +given folder, and returns them in a map. + +For each line in the pyvenv.cfg file, if that line can be parsed into exactly +two strings separated by `=` (with any amount of whitespace surrounding the =) +then it is considered a `key = value` line. The left hand string is the key, +the right hand is the value. + +If the value starts with a `'` or a `"` then the first and last character is +stripped from the value before being captured. + +.Parameter ConfigDir +Path to the directory that contains the `pyvenv.cfg` file. 
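As a worked illustration of the parsing rules described above (a generic sample, not the actual venv/pyvenv.cfg committed in this patch): given a pyvenv.cfg containing

    home = /usr/local/bin
    include-system-site-packages = false
    version = 3.8.0
    prompt = 'my-venv'

Get-PyVenvConfig would return a map with the keys home, include-system-site-packages, version and prompt; the whitespace around each `=` is discarded, and because the prompt value starts with a quote character, its first and last characters are stripped, so $pyvenvConfig['prompt'] would evaluate to my-venv.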
+#> +function Get-PyVenvConfig( + [String] + $ConfigDir +) { + Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" + + # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). + $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue + + # An empty map will be returned if no config file is found. + $pyvenvConfig = @{ } + + if ($pyvenvConfigPath) { + + Write-Verbose "File exists, parse `key = value` lines" + $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath + + $pyvenvConfigContent | ForEach-Object { + $keyval = $PSItem -split "\s*=\s*", 2 + if ($keyval[0] -and $keyval[1]) { + $val = $keyval[1] + + # Remove extraneous quotations around a string value. + if ("'""".Contains($val.Substring(0, 1))) { + $val = $val.Substring(1, $val.Length - 2) + } + + $pyvenvConfig[$keyval[0]] = $val + Write-Verbose "Adding Key: '$($keyval[0])'='$val'" + } + } + } + return $pyvenvConfig +} + + +<# Begin Activate script --------------------------------------------------- #> + +# Determine the containing directory of this script +$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition +$VenvExecDir = Get-Item -Path $VenvExecPath + +Write-Verbose "Activation script is located in path: '$VenvExecPath'" +Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" +Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" + +# Set values required in priority: CmdLine, ConfigFile, Default +# First, get the location of the virtual environment, it might not be +# VenvExecDir if specified on the command line. +if ($VenvDir) { + Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" +} +else { + Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." + $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") + Write-Verbose "VenvDir=$VenvDir" +} + +# Next, read the `pyvenv.cfg` file to determine any required value such +# as `prompt`. +$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir + +# Next, set the prompt from the command line, or the config file, or +# just use the name of the virtual environment folder. +if ($Prompt) { + Write-Verbose "Prompt specified as argument, using '$Prompt'" +} +else { + Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" + if ($pyvenvCfg -and $pyvenvCfg['prompt']) { + Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" + $Prompt = $pyvenvCfg['prompt']; + } + else { + Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virutal environment)" + Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" + $Prompt = Split-Path -Path $venvDir -Leaf + } +} + +Write-Verbose "Prompt = '$Prompt'" +Write-Verbose "VenvDir='$VenvDir'" + +# Deactivate any currently active virtual environment, but leave the +# deactivate function in place. +deactivate -nondestructive + +# Now set the environment variable VIRTUAL_ENV, used by many tools to determine +# that there is an activated venv. 
+$env:VIRTUAL_ENV = $VenvDir + +if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { + + Write-Verbose "Setting prompt to '$Prompt'" + + # Set the prompt to include the env name + # Make sure _OLD_VIRTUAL_PROMPT is global + function global:_OLD_VIRTUAL_PROMPT { "" } + Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT + New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt + + function global:prompt { + Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " + _OLD_VIRTUAL_PROMPT + } +} + +# Clear PYTHONHOME +if (Test-Path -Path Env:PYTHONHOME) { + Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME + Remove-Item -Path Env:PYTHONHOME +} + +# Add the venv to the PATH +Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH +$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/venv/bin/activate b/venv/bin/activate new file mode 100644 index 0000000..8ef985e --- /dev/null +++ b/venv/bin/activate @@ -0,0 +1,76 @@ +# This file must be used with "source bin/activate" *from bash* +# you cannot run it directly + +deactivate () { + # reset old environment variables + if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then + PATH="${_OLD_VIRTUAL_PATH:-}" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then + PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # This should detect bash and zsh, which have a hash command that must + # be called to get it to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r + fi + + if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then + PS1="${_OLD_VIRTUAL_PS1:-}" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + if [ ! "${1:-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV="/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv" +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/bin:$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "${PYTHONHOME:-}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1:-}" + if [ "x(venv) " != x ] ; then + PS1="(venv) ${PS1:-}" + else + if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1" + else + PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1" + fi + fi + export PS1 +fi + +# This should detect bash and zsh, which have a hash command that must +# be called to get it to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r +fi diff --git a/venv/bin/activate.csh b/venv/bin/activate.csh new file mode 100644 index 0000000..0924157 --- /dev/null +++ b/venv/bin/activate.csh @@ -0,0 +1,37 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi . 
+# Ported to Python 3.3 venv by Andrew Svetlov + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV "/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv" + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/bin:$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + if ("venv" != "") then + set env_name = "venv" + else + if (`basename "VIRTUAL_ENV"` == "__") then + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + set env_name = `basename \`dirname "$VIRTUAL_ENV"\`` + else + set env_name = `basename "$VIRTUAL_ENV"` + endif + endif + set prompt = "[$env_name] $prompt" + unset env_name +endif + +alias pydoc python -m pydoc + +rehash diff --git a/venv/bin/activate.fish b/venv/bin/activate.fish new file mode 100644 index 0000000..02cb8c5 --- /dev/null +++ b/venv/bin/activate.fish @@ -0,0 +1,75 @@ +# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org) +# you cannot run it directly + +function deactivate -d "Exit virtualenv and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + functions -e fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + + set -e VIRTUAL_ENV + if test "$argv[1]" != "nondestructive" + # Self destruct! + functions -e deactivate + end +end + +# unset irrelevant variables +deactivate nondestructive + +set -gx VIRTUAL_ENV "/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv" + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/bin" $PATH + +# unset PYTHONHOME if set +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # save the current fish_prompt function as the function _old_fish_prompt + functions -c fish_prompt _old_fish_prompt + + # with the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command + set -l old_status $status + + # Prompt override? + if test -n "(venv) " + printf "%s%s" "(venv) " (set_color normal) + else + # ...Otherwise, prepend env + set -l _checkbase (basename "$VIRTUAL_ENV") + if test $_checkbase = "__" + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) + else + printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) + end + end + + # Restore the return status of the previous command. + echo "exit $old_status" | . 
+ _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/venv/bin/easy_install b/venv/bin/easy_install new file mode 100755 index 0000000..5ab24c3 --- /dev/null +++ b/venv/bin/easy_install @@ -0,0 +1,8 @@ +#!/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from setuptools.command.easy_install import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/easy_install-3.8 b/venv/bin/easy_install-3.8 new file mode 100755 index 0000000..5ab24c3 --- /dev/null +++ b/venv/bin/easy_install-3.8 @@ -0,0 +1,8 @@ +#!/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from setuptools.command.easy_install import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/flask b/venv/bin/flask new file mode 100755 index 0000000..5688306 --- /dev/null +++ b/venv/bin/flask @@ -0,0 +1,8 @@ +#!/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from flask.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/gunicorn b/venv/bin/gunicorn new file mode 100755 index 0000000..e097b30 --- /dev/null +++ b/venv/bin/gunicorn @@ -0,0 +1,8 @@ +#!/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from gunicorn.app.wsgiapp import run +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(run()) diff --git a/venv/bin/pip b/venv/bin/pip new file mode 100755 index 0000000..a9a252d --- /dev/null +++ b/venv/bin/pip @@ -0,0 +1,8 @@ +#!/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip3 b/venv/bin/pip3 new file mode 100755 index 0000000..a9a252d --- /dev/null +++ b/venv/bin/pip3 @@ -0,0 +1,8 @@ +#!/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip3.8 b/venv/bin/pip3.8 new file mode 100755 index 0000000..a9a252d --- /dev/null +++ b/venv/bin/pip3.8 @@ -0,0 +1,8 @@ +#!/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/python b/venv/bin/python new file mode 120000 index 0000000..778bb6a --- /dev/null +++ b/venv/bin/python @@ -0,0 +1 @@ +/Users/indraneel/opt/anaconda3/bin/python \ No newline at end of file diff --git a/venv/bin/python3 b/venv/bin/python3 new file mode 120000 index 0000000..d8654aa --- /dev/null +++ b/venv/bin/python3 @@ -0,0 +1 @@ +python \ No newline at end of file diff --git a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/INSTALLER 
b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/LICENSE.rst new file mode 100644 index 0000000..9d227a0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/METADATA b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/METADATA new file mode 100644 index 0000000..aaf27ca --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/METADATA @@ -0,0 +1,125 @@ +Metadata-Version: 2.1 +Name: Flask +Version: 2.0.2 +Summary: A simple framework for building complex web applications. 
+Home-page: https://palletsprojects.com/p/flask +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://flask.palletsprojects.com/ +Project-URL: Changes, https://flask.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/flask/ +Project-URL: Issue Tracker, https://github.com/pallets/flask/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Framework :: Flask +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: Werkzeug (>=2.0) +Requires-Dist: Jinja2 (>=3.0) +Requires-Dist: itsdangerous (>=2.0) +Requires-Dist: click (>=7.1.2) +Provides-Extra: async +Requires-Dist: asgiref (>=3.2) ; extra == 'async' +Provides-Extra: dotenv +Requires-Dist: python-dotenv ; extra == 'dotenv' + +Flask +===== + +Flask is a lightweight `WSGI`_ web application framework. It is designed +to make getting started quick and easy, with the ability to scale up to +complex applications. It began as a simple wrapper around `Werkzeug`_ +and `Jinja`_ and has become one of the most popular Python web +application frameworks. + +Flask offers suggestions, but doesn't enforce any dependencies or +project layout. It is up to the developer to choose the tools and +libraries they want to use. There are many extensions provided by the +community that make adding new functionality easy. + +.. _WSGI: https://wsgi.readthedocs.io/ +.. _Werkzeug: https://werkzeug.palletsprojects.com/ +.. _Jinja: https://jinja.palletsprojects.com/ + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U Flask + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. code-block:: python + + # save this as app.py + from flask import Flask + + app = Flask(__name__) + + @app.route("/") + def hello(): + return "Hello, World!" + +.. code-block:: text + + $ flask run + * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) + + +Contributing +------------ + +For guidance on setting up a development environment and how to make a +contribution to Flask, see the `contributing guidelines`_. + +.. _contributing guidelines: https://github.com/pallets/flask/blob/main/CONTRIBUTING.rst + + +Donate +------ + +The Pallets organization develops and supports Flask and the libraries +it uses. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. 
_please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://flask.palletsprojects.com/ +- Changes: https://flask.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/Flask/ +- Source Code: https://github.com/pallets/flask/ +- Issue Tracker: https://github.com/pallets/flask/issues/ +- Website: https://palletsprojects.com/p/flask/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets + + diff --git a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/RECORD b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/RECORD new file mode 100644 index 0000000..9bdc8b6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/RECORD @@ -0,0 +1,51 @@ +../../../bin/flask,sha256=YvjprXzaHMVjHfJ2hgj3Y9r3ND3U4YQ97BDpyDoPtqU,262 +Flask-2.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Flask-2.0.2.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +Flask-2.0.2.dist-info/METADATA,sha256=aKsvjFA_ZjZN1jLh1Ac3aQk-ZUZDPrrwo_TGYW1kdAQ,3839 +Flask-2.0.2.dist-info/RECORD,, +Flask-2.0.2.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +Flask-2.0.2.dist-info/entry_points.txt,sha256=gBLA1aKg0OYR8AhbAfg8lnburHtKcgJLDU52BBctN0k,42 +Flask-2.0.2.dist-info/top_level.txt,sha256=dvi65F6AeGWVU0TBpYiC04yM60-FX1gJFkK31IKQr5c,6 +flask/__init__.py,sha256=9ZCelLoNCpr6eSuLmYlzvbp12B3lrLgoN5U2UWk1vdo,2251 +flask/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30 +flask/__pycache__/__init__.cpython-38.pyc,, +flask/__pycache__/__main__.cpython-38.pyc,, +flask/__pycache__/app.cpython-38.pyc,, +flask/__pycache__/blueprints.cpython-38.pyc,, +flask/__pycache__/cli.cpython-38.pyc,, +flask/__pycache__/config.cpython-38.pyc,, +flask/__pycache__/ctx.cpython-38.pyc,, +flask/__pycache__/debughelpers.cpython-38.pyc,, +flask/__pycache__/globals.cpython-38.pyc,, +flask/__pycache__/helpers.cpython-38.pyc,, +flask/__pycache__/logging.cpython-38.pyc,, +flask/__pycache__/scaffold.cpython-38.pyc,, +flask/__pycache__/sessions.cpython-38.pyc,, +flask/__pycache__/signals.cpython-38.pyc,, +flask/__pycache__/templating.cpython-38.pyc,, +flask/__pycache__/testing.cpython-38.pyc,, +flask/__pycache__/typing.cpython-38.pyc,, +flask/__pycache__/views.cpython-38.pyc,, +flask/__pycache__/wrappers.cpython-38.pyc,, +flask/app.py,sha256=ectBbi9hGmVHAse5TNcFQZIDRkDAxYUAnLgfuKD0Xws,81975 +flask/blueprints.py,sha256=AkAVXZ_MMkjwjklzCAMdBNowTiM0wVQPynnUnXjTL2M,23781 +flask/cli.py,sha256=wn2Un9RO32ZfRmCMem5KJ5h62-5lnmy1H9uxgyV-eBs,32238 +flask/config.py,sha256=70Uyjh1Jzb9MfTCT7NDhuZWAzyIEu-TIyk6-22MP3zQ,11285 +flask/ctx.py,sha256=EM3W0v1ctuFQAGk_HWtQdoJEg_r2f5Le4xcmElxFwwk,17428 +flask/debughelpers.py,sha256=W82-xrRmodjopBngI9roYH-q08EbQwN2HEGfDAi6SA0,6184 +flask/globals.py,sha256=cWd-R2hUH3VqPhnmQNww892tQS6Yjqg_wg8UvW1M7NM,1723 +flask/helpers.py,sha256=00WqA3wYeyjMrnAOPZTUyrnUf7H8ik3CVT0kqGl_qjk,30589 +flask/json/__init__.py,sha256=unAKdZBlxMI5OMiTU0-Z2Hl4CF1CMJmqTUzpStiExNw,11822 +flask/json/__pycache__/__init__.cpython-38.pyc,, +flask/json/__pycache__/tag.cpython-38.pyc,, +flask/json/tag.py,sha256=fys3HBLssWHuMAIJuTcf2K0bCtosePBKXIWASZEEjnU,8857 +flask/logging.py,sha256=1o_hirVGqdj7SBdETnhX7IAjklG89RXlrwz_2CjzQQE,2273 +flask/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +flask/scaffold.py,sha256=fM9mRy7QBh9fhJ0VTogVx900dDa5oxz8FOw6OK5F-TU,32796 +flask/sessions.py,sha256=Kb7zY4qBIOU2cw1xM5mQ_KmgYUBDFbUYWjlkq0EFYis,15189 
+flask/signals.py,sha256=H7QwDciK-dtBxinjKpexpglP0E6k0MJILiFWTItfmqU,2136 +flask/templating.py,sha256=l96VD39JQ0nue4Bcj7wZ4-FWWs-ppLxvgBCpwDQ4KAk,5626 +flask/testing.py,sha256=OsHT-2B70abWH3ulY9IbhLchXIeyj3L-cfcDa88wv5E,10281 +flask/typing.py,sha256=hXEVcXoH-QEabmy1F11pYaQ2SonlkMAwfjBAnqj2x18,1982 +flask/views.py,sha256=nhq31TRB5Z-z2mjFGZACaaB2Et5XPCmWhWxJxOvLWww,5948 +flask/wrappers.py,sha256=VndbHPRBSUUOejmd2Y3ydkoCVUtsS2OJIdJEVIkBVD8,5604 diff --git a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/WHEEL b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/WHEEL new file mode 100644 index 0000000..5bad85f --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/entry_points.txt b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/entry_points.txt new file mode 100644 index 0000000..1eb0252 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +flask = flask.cli:main + diff --git a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/top_level.txt new file mode 100644 index 0000000..7e10602 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/top_level.txt @@ -0,0 +1 @@ +flask diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/LICENSE.rst new file mode 100644 index 0000000..c37cae4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/METADATA b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/METADATA new file mode 100644 index 0000000..3b9355a --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/METADATA @@ -0,0 +1,113 @@ +Metadata-Version: 2.1 +Name: Jinja2 +Version: 3.0.3 +Summary: A very fast and expressive template engine. +Home-page: https://palletsprojects.com/p/jinja/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://jinja.palletsprojects.com/ +Project-URL: Changes, https://jinja.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/jinja/ +Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: MarkupSafe (>=2.0) +Provides-Extra: i18n +Requires-Dist: Babel (>=2.7) ; extra == 'i18n' + +Jinja +===== + +Jinja is a fast, expressive, extensible templating engine. Special +placeholders in the template allow writing code similar to Python +syntax. Then the template is passed data to render the final document. + +It includes: + +- Template inheritance and inclusion. +- Define and import macros within templates. +- HTML templates can use autoescaping to prevent XSS from untrusted + user input. +- A sandboxed environment can safely render untrusted templates. +- AsyncIO support for generating templates and calling async + functions. +- I18N support with Babel. +- Templates are compiled to optimized Python code just-in-time and + cached, or can be compiled ahead-of-time. +- Exceptions point to the correct line in templates to make debugging + easier. +- Extensible filters, tests, functions, and even syntax. + +Jinja's philosophy is that while application logic belongs in Python if +possible, it shouldn't make the template designer's job difficult by +restricting functionality too much. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U Jinja2 + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +In A Nutshell +------------- + +.. 
code-block:: jinja + + {% extends "base.html" %} + {% block title %}Members{% endblock %} + {% block content %} + + {% endblock %} + + +Donate +------ + +The Pallets organization develops and supports Jinja and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://jinja.palletsprojects.com/ +- Changes: https://jinja.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/Jinja2/ +- Source Code: https://github.com/pallets/jinja/ +- Issue Tracker: https://github.com/pallets/jinja/issues/ +- Website: https://palletsprojects.com/p/jinja/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets + + diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/RECORD b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/RECORD new file mode 100644 index 0000000..e60c78b --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/RECORD @@ -0,0 +1,58 @@ +Jinja2-3.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Jinja2-3.0.3.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 +Jinja2-3.0.3.dist-info/METADATA,sha256=uvKoBSMLvh0qHK-6khEqSe1yOV4jxFzbPSREOp-3BXk,3539 +Jinja2-3.0.3.dist-info/RECORD,, +Jinja2-3.0.3.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +Jinja2-3.0.3.dist-info/entry_points.txt,sha256=Qy_DkVo6Xj_zzOtmErrATe8lHZhOqdjpt3e4JJAGyi8,61 +Jinja2-3.0.3.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7 +jinja2/__init__.py,sha256=V3JjnTV-nyIHN6rwj03N1M11fegjGvv-weiHMQwH1pk,2205 +jinja2/__pycache__/__init__.cpython-38.pyc,, +jinja2/__pycache__/_identifier.cpython-38.pyc,, +jinja2/__pycache__/async_utils.cpython-38.pyc,, +jinja2/__pycache__/bccache.cpython-38.pyc,, +jinja2/__pycache__/compiler.cpython-38.pyc,, +jinja2/__pycache__/constants.cpython-38.pyc,, +jinja2/__pycache__/debug.cpython-38.pyc,, +jinja2/__pycache__/defaults.cpython-38.pyc,, +jinja2/__pycache__/environment.cpython-38.pyc,, +jinja2/__pycache__/exceptions.cpython-38.pyc,, +jinja2/__pycache__/ext.cpython-38.pyc,, +jinja2/__pycache__/filters.cpython-38.pyc,, +jinja2/__pycache__/idtracking.cpython-38.pyc,, +jinja2/__pycache__/lexer.cpython-38.pyc,, +jinja2/__pycache__/loaders.cpython-38.pyc,, +jinja2/__pycache__/meta.cpython-38.pyc,, +jinja2/__pycache__/nativetypes.cpython-38.pyc,, +jinja2/__pycache__/nodes.cpython-38.pyc,, +jinja2/__pycache__/optimizer.cpython-38.pyc,, +jinja2/__pycache__/parser.cpython-38.pyc,, +jinja2/__pycache__/runtime.cpython-38.pyc,, +jinja2/__pycache__/sandbox.cpython-38.pyc,, +jinja2/__pycache__/tests.cpython-38.pyc,, +jinja2/__pycache__/utils.cpython-38.pyc,, +jinja2/__pycache__/visitor.cpython-38.pyc,, +jinja2/_identifier.py,sha256=EdgGJKi7O1yvr4yFlvqPNEqV6M1qHyQr8Gt8GmVTKVM,1775 +jinja2/async_utils.py,sha256=jBcJSmLoQa2PjJdNcOpwaUmBxFNE9rZNwMF7Ob3dP9I,1947 +jinja2/bccache.py,sha256=v5rKAlYxIvfJEa0uGzAC6yCYSS3KuXT5Eqi-n9qvNi8,12670 +jinja2/compiler.py,sha256=v7zKz-mgSYXmfXD9mRmi2BU0B6Z-1RGZmOXCrsPKzc0,72209 +jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433 +jinja2/debug.py,sha256=r0JL0vfO7HPlyKZEdr6eVlg7HoIg2OQGmJ7SeUEyAeI,8494 +jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267 +jinja2/environment.py,sha256=Vz20npBX5-SUH_eguQuxrSQDEsLFjho0qcHLdMhY3hA,60983 
+jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071 +jinja2/ext.py,sha256=44SjDjeYkkxQTpmC2BetOTxEFMgQ42p2dfSwXmPFcSo,32122 +jinja2/filters.py,sha256=jusKTZbd0ddZMaibZkxMUVKNsOsaYtOq_Il8Imtx4BE,52609 +jinja2/idtracking.py,sha256=WekexMql3u5n3vDxFsQ_i8HW0j24AtjWTjrPBLWrHww,10721 +jinja2/lexer.py,sha256=qNEQqDQw_zO5EaH6rFQsER7Qwn2du0o22prB-TR11HE,29930 +jinja2/loaders.py,sha256=1MjXJOU6p4VywFqtpDZhtvtT_vIlmHnZKMKHHw4SZzA,22754 +jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396 +jinja2/nativetypes.py,sha256=KCJl71MogrDih_BHBu6xV5p7Cr_jggAgu-shKTg6L28,3969 +jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550 +jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650 +jinja2/parser.py,sha256=kHnU8v92GwMYkfr0MVakWv8UlSf_kJPx8LUsgQMof70,39767 +jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jinja2/runtime.py,sha256=wVRlkEmAgNU67AIQDqLvI6UkNLkzDqpLA-z4Mi3vl3g,35054 +jinja2/sandbox.py,sha256=-8zxR6TO9kUkciAVFsIKu8Oq-C7PTeYEdZ5TtA55-gw,14600 +jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905 +jinja2/utils.py,sha256=udQxWIKaq4QDCZiXN31ngKOaGGdaMA5fl0JMaM-F6fg,26971 +jinja2/visitor.py,sha256=ZmeLuTj66ic35-uFH-1m0EKXiw4ObDDb_WuE6h5vPFg,3572 diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/WHEEL b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/WHEEL new file mode 100644 index 0000000..5bad85f --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/entry_points.txt b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/entry_points.txt new file mode 100644 index 0000000..3619483 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[babel.extractors] +jinja2 = jinja2.ext:babel_extract [i18n] + diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/top_level.txt new file mode 100644 index 0000000..7f7afbf --- /dev/null +++ b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/top_level.txt @@ -0,0 +1 @@ +jinja2 diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/LICENSE.rst new file mode 100644 index 0000000..9d227a0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/METADATA b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/METADATA new file mode 100644 index 0000000..ef44e2b --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/METADATA @@ -0,0 +1,100 @@ +Metadata-Version: 2.1 +Name: MarkupSafe +Version: 2.0.1 +Summary: Safely add untrusted strings to HTML/XML markup. +Home-page: https://palletsprojects.com/p/markupsafe/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://markupsafe.palletsprojects.com/ +Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/markupsafe/ +Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst + +MarkupSafe +========== + +MarkupSafe implements a text object that escapes characters so it is +safe to use in HTML and XML. Characters that have special meanings are +replaced so that they display as the actual characters. This mitigates +injection attacks, meaning untrusted user input can safely be displayed +on a page. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U MarkupSafe + +.. _pip: https://pip.pypa.io/en/stable/quickstart/ + + +Examples +-------- + +.. 
code-block:: pycon + + >>> from markupsafe import Markup, escape + + >>> # escape replaces special characters and wraps in Markup + >>> escape("") + Markup('<script>alert(document.cookie);</script>') + + >>> # wrap in Markup to mark text "safe" and prevent escaping + >>> Markup("Hello") + Markup('hello') + + >>> escape(Markup("Hello")) + Markup('hello') + + >>> # Markup is a str subclass + >>> # methods and operators escape their arguments + >>> template = Markup("Hello {name}") + >>> template.format(name='"World"') + Markup('Hello "World"') + + +Donate +------ + +The Pallets organization develops and supports MarkupSafe and other +popular packages. In order to grow the community of contributors and +users, and allow the maintainers to devote more time to the projects, +`please donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://markupsafe.palletsprojects.com/ +- Changes: https://markupsafe.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/MarkupSafe/ +- Source Code: https://github.com/pallets/markupsafe/ +- Issue Tracker: https://github.com/pallets/markupsafe/issues/ +- Website: https://palletsprojects.com/p/markupsafe/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets + + diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/RECORD b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/RECORD new file mode 100644 index 0000000..274f151 --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/RECORD @@ -0,0 +1,14 @@ +MarkupSafe-2.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +MarkupSafe-2.0.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +MarkupSafe-2.0.1.dist-info/METADATA,sha256=FmPpxBdaqCCjF-XKqoxeEzqAzhetQnrkkSsd3V3X-Jc,3211 +MarkupSafe-2.0.1.dist-info/RECORD,, +MarkupSafe-2.0.1.dist-info/WHEEL,sha256=RA4ju32EWHpO-G1BhxTQ3AwXQWjnnrzYGkM9skoN7_I,109 +MarkupSafe-2.0.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 +markupsafe/__init__.py,sha256=9Tez4UIlI7J6_sQcUFK1dKniT_b_8YefpGIyYJ3Sr2Q,8923 +markupsafe/__pycache__/__init__.cpython-38.pyc,, +markupsafe/__pycache__/_native.cpython-38.pyc,, +markupsafe/_native.py,sha256=GTKEV-bWgZuSjklhMHOYRHU9k0DMewTf5mVEZfkbuns,1986 +markupsafe/_speedups.c,sha256=CDDtwaV21D2nYtypnMQzxvvpZpcTvIs8OZ6KDa1g4t0,7400 +markupsafe/_speedups.cpython-38-darwin.so,sha256=Tjek-7TDXevv7qgNBsLLUfsOQlfCSCGzhhV5cCUrIxk,35480 +markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229 +markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/WHEEL b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/WHEEL new file mode 100644 index 0000000..d70ba8e --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: false +Tag: cp38-cp38-macosx_10_9_x86_64 + diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/top_level.txt new file mode 100644 index 0000000..75bf729 --- /dev/null +++ b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/top_level.txt @@ -0,0 +1 @@ +markupsafe diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/INSTALLER 
b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/LICENSE.rst new file mode 100644 index 0000000..c37cae4 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/METADATA b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/METADATA new file mode 100644 index 0000000..b58b9bd --- /dev/null +++ b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/METADATA @@ -0,0 +1,129 @@ +Metadata-Version: 2.1 +Name: Werkzeug +Version: 2.0.2 +Summary: The comprehensive WSGI web application library. 
+Home-page: https://palletsprojects.com/p/werkzeug/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://werkzeug.palletsprojects.com/ +Project-URL: Changes, https://werkzeug.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/werkzeug/ +Project-URL: Issue Tracker, https://github.com/pallets/werkzeug/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: dataclasses ; python_version < "3.7" +Provides-Extra: watchdog +Requires-Dist: watchdog ; extra == 'watchdog' + +Werkzeug +======== + +*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff") + +Werkzeug is a comprehensive `WSGI`_ web application library. It began as +a simple collection of various utilities for WSGI applications and has +become one of the most advanced WSGI utility libraries. + +It includes: + +- An interactive debugger that allows inspecting stack traces and + source code in the browser with an interactive interpreter for any + frame in the stack. +- A full-featured request object with objects to interact with + headers, query args, form data, files, and cookies. +- A response object that can wrap other WSGI applications and handle + streaming data. +- A routing system for matching URLs to endpoints and generating URLs + for endpoints, with an extensible system for capturing variables + from URLs. +- HTTP utilities to handle entity tags, cache control, dates, user + agents, cookies, files, and more. +- A threaded WSGI server for use while developing applications + locally. +- A test client for simulating HTTP requests during testing without + requiring running a server. + +Werkzeug doesn't enforce any dependencies. It is up to the developer to +choose a template engine, database adapter, and even how to handle +requests. It can be used to build all sorts of end user applications +such as blogs, wikis, or bulletin boards. + +`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while +providing more structure and patterns for defining powerful +applications. + +.. _WSGI: https://wsgi.readthedocs.io/en/latest/ +.. _Flask: https://www.palletsprojects.com/p/flask/ + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U Werkzeug + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. 
code-block:: python + + from werkzeug.wrappers import Request, Response + + @Request.application + def application(request): + return Response('Hello, World!') + + if __name__ == '__main__': + from werkzeug.serving import run_simple + run_simple('localhost', 4000, application) + + +Donate +------ + +The Pallets organization develops and supports Werkzeug and other +popular packages. In order to grow the community of contributors and +users, and allow the maintainers to devote more time to the projects, +`please donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://werkzeug.palletsprojects.com/ +- Changes: https://werkzeug.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/Werkzeug/ +- Source Code: https://github.com/pallets/werkzeug/ +- Issue Tracker: https://github.com/pallets/werkzeug/issues/ +- Website: https://palletsprojects.com/p/werkzeug/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets + + diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/RECORD b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/RECORD new file mode 100644 index 0000000..a566dd6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/RECORD @@ -0,0 +1,111 @@ +Werkzeug-2.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Werkzeug-2.0.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 +Werkzeug-2.0.2.dist-info/METADATA,sha256=vh_xrARtpmkFYnWRAgfSiHgl66LH143rMfAfPZo-R_E,4452 +Werkzeug-2.0.2.dist-info/RECORD,, +Werkzeug-2.0.2.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +Werkzeug-2.0.2.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9 +werkzeug/__init__.py,sha256=Wx1PLCftJ7UAS0fBXEO4Prdr6kvEQ124Stwg-XwyhW4,188 +werkzeug/__pycache__/__init__.cpython-38.pyc,, +werkzeug/__pycache__/_internal.cpython-38.pyc,, +werkzeug/__pycache__/_reloader.cpython-38.pyc,, +werkzeug/__pycache__/datastructures.cpython-38.pyc,, +werkzeug/__pycache__/exceptions.cpython-38.pyc,, +werkzeug/__pycache__/filesystem.cpython-38.pyc,, +werkzeug/__pycache__/formparser.cpython-38.pyc,, +werkzeug/__pycache__/http.cpython-38.pyc,, +werkzeug/__pycache__/local.cpython-38.pyc,, +werkzeug/__pycache__/routing.cpython-38.pyc,, +werkzeug/__pycache__/security.cpython-38.pyc,, +werkzeug/__pycache__/serving.cpython-38.pyc,, +werkzeug/__pycache__/test.cpython-38.pyc,, +werkzeug/__pycache__/testapp.cpython-38.pyc,, +werkzeug/__pycache__/urls.cpython-38.pyc,, +werkzeug/__pycache__/user_agent.cpython-38.pyc,, +werkzeug/__pycache__/useragents.cpython-38.pyc,, +werkzeug/__pycache__/utils.cpython-38.pyc,, +werkzeug/__pycache__/wsgi.cpython-38.pyc,, +werkzeug/_internal.py,sha256=_QKkvdaG4pDFwK68c0EpPzYJGe9Y7toRAT1cBbC-CxU,18572 +werkzeug/_reloader.py,sha256=B1hEfgsUOz2IginBQM5Zak_eaIF7gr3GS5-0x2OHvAE,13950 +werkzeug/datastructures.py,sha256=m79A8rHQEt5B7qVqyrjARXzHL66Katn8S92urGscTw4,97929 +werkzeug/datastructures.pyi,sha256=CoVwrQ2Vr9JnbprNL9aE3vOz8mOejT9qysQ-BT53C8Y,34089 +werkzeug/debug/__init__.py,sha256=jYA1e1Gw_8EPOytr-BoMdmm0rzP-Z1H0Ih7wIObnKwQ,17968 +werkzeug/debug/__pycache__/__init__.cpython-38.pyc,, +werkzeug/debug/__pycache__/console.cpython-38.pyc,, +werkzeug/debug/__pycache__/repr.cpython-38.pyc,, +werkzeug/debug/__pycache__/tbtools.cpython-38.pyc,, +werkzeug/debug/console.py,sha256=E1nBMEvFkX673ShQjPtVY-byYatfX9MN-dBMjRI8a8E,5897 
+werkzeug/debug/repr.py,sha256=QCSHENKsChEZDCIApkVi_UNjhJ77v8BMXK1OfxO189M,9483 +werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673 +werkzeug/debug/shared/ICON_LICENSE.md,sha256=DhA6Y1gUl5Jwfg0NFN9Rj4VWITt8tUx0IvdGf0ux9-s,222 +werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507 +werkzeug/debug/shared/debugger.js,sha256=tg42SZs1SVmYWZ-_Fj5ELK5-FLHnGNQrei0K2By8Bw8,10521 +werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191 +werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200 +werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818 +werkzeug/debug/shared/style.css,sha256=h1ZSUVaKNpfbfcYzRb513WAhPySGDQom1uih3uEDxPw,6704 +werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220 +werkzeug/debug/tbtools.py,sha256=AFRrjLDCAps7G5K2-RxNZpXXaEoeFHm68T00f4vlDYA,19362 +werkzeug/exceptions.py,sha256=CUwx0pBiNbk4f9cON17ekgKnmLi6HIVFjUmYZc2x0wM,28681 +werkzeug/filesystem.py,sha256=JS2Dv2QF98WILxY4_thHl-WMcUcwluF_4igkDPaP1l4,1956 +werkzeug/formparser.py,sha256=X-p3Ek4ji8XrKrbmaWxr8StLSc6iuksbpIeweaabs4s,17400 +werkzeug/http.py,sha256=oUCXFFMnkOQ-cHbUY_aiqitshcrSzNDq3fEMf1VI_yk,45141 +werkzeug/local.py,sha256=bwL-y3-qOZAspJ66W1P36SUApLXJy3UY8nLYbM9kfmY,23183 +werkzeug/middleware/__init__.py,sha256=qfqgdT5npwG9ses3-FXQJf3aB95JYP1zchetH_T3PUw,500 +werkzeug/middleware/__pycache__/__init__.cpython-38.pyc,, +werkzeug/middleware/__pycache__/dispatcher.cpython-38.pyc,, +werkzeug/middleware/__pycache__/http_proxy.cpython-38.pyc,, +werkzeug/middleware/__pycache__/lint.cpython-38.pyc,, +werkzeug/middleware/__pycache__/profiler.cpython-38.pyc,, +werkzeug/middleware/__pycache__/proxy_fix.cpython-38.pyc,, +werkzeug/middleware/__pycache__/shared_data.cpython-38.pyc,, +werkzeug/middleware/dispatcher.py,sha256=Fh_w-KyWnTSYF-Lfv5dimQ7THSS7afPAZMmvc4zF1gg,2580 +werkzeug/middleware/http_proxy.py,sha256=HE8VyhS7CR-E1O6_9b68huv8FLgGGR1DLYqkS3Xcp3Q,7558 +werkzeug/middleware/lint.py,sha256=sAg3GcOhICIkwYX5bJGG8n8iebX0Yipq_UH0HvrBvoU,13964 +werkzeug/middleware/profiler.py,sha256=QkXk7cqnaPnF8wQu-5SyPCIOT3_kdABUBorQOghVNOA,4899 +werkzeug/middleware/proxy_fix.py,sha256=uRgQ3dEvFV8JxUqajHYYYOPEeA_BFqaa51Yp8VW0uzA,6849 +werkzeug/middleware/shared_data.py,sha256=xydEqOhAGg0aQJEllPDVfz2-8jHwWvJpAxfPsfPCu7k,10960 +werkzeug/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +werkzeug/routing.py,sha256=oqJ32sWIZtIF6zbqfrnwB1Pbv2ShNwPDJd6FYqxdYVo,84527 +werkzeug/sansio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +werkzeug/sansio/__pycache__/__init__.cpython-38.pyc,, +werkzeug/sansio/__pycache__/multipart.cpython-38.pyc,, +werkzeug/sansio/__pycache__/request.cpython-38.pyc,, +werkzeug/sansio/__pycache__/response.cpython-38.pyc,, +werkzeug/sansio/__pycache__/utils.cpython-38.pyc,, +werkzeug/sansio/multipart.py,sha256=bJMCNC2f5xyAaylahNViJ0JqmV4ThLRbDVGVzKwcqrQ,8751 +werkzeug/sansio/request.py,sha256=aA9rABkWiG4MhYMByanst2NXkEclsq8SIxhb0LQf0e0,20228 +werkzeug/sansio/response.py,sha256=zvCq9HSBBZGBd5Gg412BY9RZIwnKsJl5Kzfd3Kl9sSo,26098 +werkzeug/sansio/utils.py,sha256=V5v-UUnX8pm4RehP9Tt_NiUSOJGJGUvKjlW0eOIQldM,4164 +werkzeug/security.py,sha256=gPDRuCjkjWrcqj99tBMq8_nHFZLFQjgoW5Ga5XIw9jo,8158 +werkzeug/serving.py,sha256=AfgLn0yKr9qXknmwO-0KXJ055oloS4h5DIFDHEu8iHA,38088 +werkzeug/test.py,sha256=8gE1l-Y9yAh2i3SI0kgpxIaI4oYZuehIkxxyDFcz6J0,48123 
+werkzeug/testapp.py,sha256=f48prWSGJhbSrvYb8e1fnAah4BkrLb0enHSdChgsjBY,9471 +werkzeug/urls.py,sha256=Du2lreBHvgBh5c2_bcx72g3hzV2ZabXYZsp-picUIJs,41023 +werkzeug/user_agent.py,sha256=WclZhpvgLurMF45hsioSbS75H1Zb4iMQGKN3_yZ2oKo,1420 +werkzeug/useragents.py,sha256=G8tmv_6vxJaPrLQH3eODNgIYe0_V6KETROQlJI-WxDE,7264 +werkzeug/utils.py,sha256=D_dnCLUfodQ4k0GRSpnI6qDoVoaX7-Dza57bx7sabG0,37101 +werkzeug/wrappers/__init__.py,sha256=-s75nPbyXHzU_rwmLPDhoMuGbEUk0jZT_n0ZQAOFGf8,654 +werkzeug/wrappers/__pycache__/__init__.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/accept.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/auth.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/base_request.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/base_response.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/common_descriptors.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/cors.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/etag.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/json.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/request.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/response.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/user_agent.cpython-38.pyc,, +werkzeug/wrappers/accept.py,sha256=_oZtAQkahvsrPRkNj2fieg7_St9P0NFC3SgZbJKS6xU,429 +werkzeug/wrappers/auth.py,sha256=rZPCzGxHk9R55PRkmS90kRywUVjjuMWzCGtH68qCq8U,856 +werkzeug/wrappers/base_request.py,sha256=saz9RyNQkvI_XLPYVm29KijNHmD1YzgxDqa0qHTbgss,1174 +werkzeug/wrappers/base_response.py,sha256=q_-TaYywT5G4zA-DWDRDJhJSat2_4O7gOPob6ye4_9A,1186 +werkzeug/wrappers/common_descriptors.py,sha256=v_kWLH3mvCiSRVJ1FNw7nO3w2UJfzY57UKKB5J4zCvE,898 +werkzeug/wrappers/cors.py,sha256=c5UndlZsZvYkbPrp6Gj5iSXxw_VOJDJHskO6-jRmNyQ,846 +werkzeug/wrappers/etag.py,sha256=XHWQQs7Mdd1oWezgBIsl-bYe8ydKkRZVil2Qd01D0Mo,846 +werkzeug/wrappers/json.py,sha256=HM1btPseGeXca0vnwQN_MvZl6h-qNsFY5YBKXKXFwus,410 +werkzeug/wrappers/request.py,sha256=yZGplfC3UqNuykwLJmgywiMhmnoKEGHJOZn_A_ublcQ,24822 +werkzeug/wrappers/response.py,sha256=0n8OcQptiM2e550SALLeg7vC1uWsUbCeE1rPZFfXR78,35177 +werkzeug/wrappers/user_agent.py,sha256=Wl1-A0-1r8o7cHIZQTB55O4Ged6LpCKENaQDlOY5pXA,435 +werkzeug/wsgi.py,sha256=L7s5-Rlt7BRVEZ1m81MaenGfMDP7yL3p1Kxt9Yssqzg,33727 diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/WHEEL b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/WHEEL new file mode 100644 index 0000000..5bad85f --- /dev/null +++ b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/top_level.txt new file mode 100644 index 0000000..6fe8da8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/top_level.txt @@ -0,0 +1 @@ +werkzeug diff --git a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/LICENSE.rst new file mode 100644 index 0000000..d12a849 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2014 Pallets + +Redistribution and use in source and binary 
forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/METADATA b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/METADATA new file mode 100644 index 0000000..2c8da01 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/METADATA @@ -0,0 +1,111 @@ +Metadata-Version: 2.1 +Name: click +Version: 8.0.3 +Summary: Composable command line interface toolkit +Home-page: https://palletsprojects.com/p/click/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://click.palletsprojects.com/ +Project-URL: Changes, https://click.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/click/ +Project-URL: Issue Tracker, https://github.com/pallets/click/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: colorama ; platform_system == "Windows" +Requires-Dist: importlib-metadata ; python_version < "3.8" + +\$ click\_ +========== + +Click is a Python package for creating beautiful command line interfaces +in a composable way with as little code as necessary. It's the "Command +Line Interface Creation Kit". It's highly configurable but comes with +sensible defaults out of the box. + +It aims to make the process of writing command line tools quick and fun +while also preventing any frustration caused by the inability to +implement an intended CLI API. 
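
The bundled example further down only shows a single command, so as a minimal sketch of the composability described above, a nested command group might look like the following (the ``cli``, ``greet``, ``db`` and ``migrate`` names are purely illustrative and do not come from this package):

.. code-block:: python

    import click

    @click.group()
    def cli():
        """Top-level group; commands and sub-groups attach to it."""

    @cli.command()
    @click.option("--name", default="world", help="Who to greet.")
    def greet(name):
        # click.echo handles encoding and works with or without a terminal.
        click.echo(f"Hello, {name}!")

    @cli.group()
    def db():
        """A nested group, illustrating arbitrary nesting of commands."""

    @db.command()
    def migrate():
        click.echo("Running migrations...")

    if __name__ == "__main__":
        cli()

Saved as, say, ``cli.py``, this would be invoked as ``python cli.py greet --name Click`` or ``python cli.py db migrate``, and ``--help`` output is generated automatically at every level.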
+ +Click in three points: + +- Arbitrary nesting of commands +- Automatic help page generation +- Supports lazy loading of subcommands at runtime + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U click + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. code-block:: python + + import click + + @click.command() + @click.option("--count", default=1, help="Number of greetings.") + @click.option("--name", prompt="Your name", help="The person to greet.") + def hello(count, name): + """Simple program that greets NAME for a total of COUNT times.""" + for _ in range(count): + click.echo(f"Hello, {name}!") + + if __name__ == '__main__': + hello() + +.. code-block:: text + + $ python hello.py --count=3 + Your name: Click + Hello, Click! + Hello, Click! + Hello, Click! + + +Donate +------ + +The Pallets organization develops and supports Click and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://click.palletsprojects.com/ +- Changes: https://click.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/click/ +- Source Code: https://github.com/pallets/click +- Issue Tracker: https://github.com/pallets/click/issues +- Website: https://palletsprojects.com/p/click +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets + + diff --git a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/RECORD b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/RECORD new file mode 100644 index 0000000..263a6c6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/RECORD @@ -0,0 +1,41 @@ +click-8.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +click-8.0.3.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 +click-8.0.3.dist-info/METADATA,sha256=_0jCOf3DdGPvKUZUlBukeb1t6Pnxmm_OMGpaBoDthfc,3247 +click-8.0.3.dist-info/RECORD,, +click-8.0.3.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +click-8.0.3.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 +click/__init__.py,sha256=YkIrDg7-0g5aBS6D2pDe58j3MOaFylHED2_8OXh2fnM,3243 +click/__pycache__/__init__.cpython-38.pyc,, +click/__pycache__/_compat.cpython-38.pyc,, +click/__pycache__/_termui_impl.cpython-38.pyc,, +click/__pycache__/_textwrap.cpython-38.pyc,, +click/__pycache__/_unicodefun.cpython-38.pyc,, +click/__pycache__/_winconsole.cpython-38.pyc,, +click/__pycache__/core.cpython-38.pyc,, +click/__pycache__/decorators.cpython-38.pyc,, +click/__pycache__/exceptions.cpython-38.pyc,, +click/__pycache__/formatting.cpython-38.pyc,, +click/__pycache__/globals.cpython-38.pyc,, +click/__pycache__/parser.cpython-38.pyc,, +click/__pycache__/shell_completion.cpython-38.pyc,, +click/__pycache__/termui.cpython-38.pyc,, +click/__pycache__/testing.cpython-38.pyc,, +click/__pycache__/types.cpython-38.pyc,, +click/__pycache__/utils.cpython-38.pyc,, +click/_compat.py,sha256=P15KQumAZC2F2MFe_JSRbvVOJcNosQfMDrdZq0ReCLM,18814 +click/_termui_impl.py,sha256=z78J5HF_RTsOBhjNLjoigaqRap3P2pWwEDDAjoZzgUg,23452 +click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353 +click/_unicodefun.py,sha256=JKSh1oSwG_zbjAu4TBCa9tQde2P9FiYcf4MBfy5NdT8,3201 
+click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860 +click/core.py,sha256=k4PA2z0BT_dmed9I52Q2VLi8r6ekTMCtCQzw2y915Xs,111478 +click/decorators.py,sha256=sGkXJGmP7eLtjtmPl_Un2uBTlrhK8s2L22n-yBiDwTw,14864 +click/exceptions.py,sha256=7gDaLGuFZBeCNwY9ERMsF2-Z3R9Fvq09Zc6IZSKjseo,9167 +click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706 +click/globals.py,sha256=kGPzxq55Ug4dFUrgRV-5oHVPOPdLCUhmYolbrrVBo8g,1985 +click/parser.py,sha256=cAEt1uQR8gq3-S9ysqbVU-fdAZNvilxw4ReJ_T1OQMk,19044 +click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +click/shell_completion.py,sha256=_hPI12T9Ex-y5a3WunWnlH0Gca2_urzXFXkDnt7G6Ow,18001 +click/termui.py,sha256=Rp2gFE8x7j8sEIoFMOcPmuqxJQVWWTrwEzyC14-sPAw,29006 +click/testing.py,sha256=kLR5Qcny1OlgxaGB3gweTr0gQe1yVlmgQRn2esA2Fz4,16020 +click/types.py,sha256=VoNZnIlRBAtRRgzavdqVnyfzY5y4U4qzVGI1UvvX1ls,35391 +click/utils.py,sha256=avYwX-3l2KkdJNUo8NmncZSoAdEmniQ_M5sdsSYloJ4,18759 diff --git a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/WHEEL b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/WHEEL new file mode 100644 index 0000000..5bad85f --- /dev/null +++ b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/top_level.txt new file mode 100644 index 0000000..dca9a90 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/top_level.txt @@ -0,0 +1 @@ +click diff --git a/venv/lib/python3.8/site-packages/click/__init__.py b/venv/lib/python3.8/site-packages/click/__init__.py new file mode 100644 index 0000000..a2ed5d1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/__init__.py @@ -0,0 +1,75 @@ +""" +Click is a simple Python module inspired by the stdlib optparse to make +writing command line scripts fun. Unlike other modules, it's based +around a simple API that does not come with too much magic and is +composable. 
+""" +from .core import Argument as Argument +from .core import BaseCommand as BaseCommand +from .core import Command as Command +from .core import CommandCollection as CommandCollection +from .core import Context as Context +from .core import Group as Group +from .core import MultiCommand as MultiCommand +from .core import Option as Option +from .core import Parameter as Parameter +from .decorators import argument as argument +from .decorators import command as command +from .decorators import confirmation_option as confirmation_option +from .decorators import group as group +from .decorators import help_option as help_option +from .decorators import make_pass_decorator as make_pass_decorator +from .decorators import option as option +from .decorators import pass_context as pass_context +from .decorators import pass_obj as pass_obj +from .decorators import password_option as password_option +from .decorators import version_option as version_option +from .exceptions import Abort as Abort +from .exceptions import BadArgumentUsage as BadArgumentUsage +from .exceptions import BadOptionUsage as BadOptionUsage +from .exceptions import BadParameter as BadParameter +from .exceptions import ClickException as ClickException +from .exceptions import FileError as FileError +from .exceptions import MissingParameter as MissingParameter +from .exceptions import NoSuchOption as NoSuchOption +from .exceptions import UsageError as UsageError +from .formatting import HelpFormatter as HelpFormatter +from .formatting import wrap_text as wrap_text +from .globals import get_current_context as get_current_context +from .parser import OptionParser as OptionParser +from .termui import clear as clear +from .termui import confirm as confirm +from .termui import echo_via_pager as echo_via_pager +from .termui import edit as edit +from .termui import get_terminal_size as get_terminal_size +from .termui import getchar as getchar +from .termui import launch as launch +from .termui import pause as pause +from .termui import progressbar as progressbar +from .termui import prompt as prompt +from .termui import secho as secho +from .termui import style as style +from .termui import unstyle as unstyle +from .types import BOOL as BOOL +from .types import Choice as Choice +from .types import DateTime as DateTime +from .types import File as File +from .types import FLOAT as FLOAT +from .types import FloatRange as FloatRange +from .types import INT as INT +from .types import IntRange as IntRange +from .types import ParamType as ParamType +from .types import Path as Path +from .types import STRING as STRING +from .types import Tuple as Tuple +from .types import UNPROCESSED as UNPROCESSED +from .types import UUID as UUID +from .utils import echo as echo +from .utils import format_filename as format_filename +from .utils import get_app_dir as get_app_dir +from .utils import get_binary_stream as get_binary_stream +from .utils import get_os_args as get_os_args +from .utils import get_text_stream as get_text_stream +from .utils import open_file as open_file + +__version__ = "8.0.3" diff --git a/venv/lib/python3.8/site-packages/click/_compat.py b/venv/lib/python3.8/site-packages/click/_compat.py new file mode 100644 index 0000000..b9e1f0d --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/_compat.py @@ -0,0 +1,627 @@ +import codecs +import io +import os +import re +import sys +import typing as t +from weakref import WeakKeyDictionary + +CYGWIN = sys.platform.startswith("cygwin") +MSYS2 = sys.platform.startswith("win") and ("GCC" 
in sys.version) +# Determine local App Engine environment, per Google's own suggestion +APP_ENGINE = "APPENGINE_RUNTIME" in os.environ and "Development/" in os.environ.get( + "SERVER_SOFTWARE", "" +) +WIN = sys.platform.startswith("win") and not APP_ENGINE and not MSYS2 +auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None +_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") + + +def get_filesystem_encoding() -> str: + return sys.getfilesystemencoding() or sys.getdefaultencoding() + + +def _make_text_stream( + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if encoding is None: + encoding = get_best_encoding(stream) + if errors is None: + errors = "replace" + return _NonClosingTextIOWrapper( + stream, + encoding, + errors, + line_buffering=True, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def is_ascii_encoding(encoding: str) -> bool: + """Checks if a given encoding is ascii.""" + try: + return codecs.lookup(encoding).name == "ascii" + except LookupError: + return False + + +def get_best_encoding(stream: t.IO) -> str: + """Returns the default stream encoding if not found.""" + rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() + if is_ascii_encoding(rv): + return "utf-8" + return rv + + +class _NonClosingTextIOWrapper(io.TextIOWrapper): + def __init__( + self, + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, + **extra: t.Any, + ) -> None: + self._stream = stream = t.cast( + t.BinaryIO, _FixupStream(stream, force_readable, force_writable) + ) + super().__init__(stream, encoding, errors, **extra) + + def __del__(self) -> None: + try: + self.detach() + except Exception: + pass + + def isatty(self) -> bool: + # https://bitbucket.org/pypy/pypy/issue/1803 + return self._stream.isatty() + + +class _FixupStream: + """The new io interface needs more from streams than streams + traditionally implement. As such, this fix-up code is necessary in + some circumstances. + + The forcing of readable and writable flags are there because some tools + put badly patched objects on sys (one such offender are certain version + of jupyter notebook). 
+ """ + + def __init__( + self, + stream: t.BinaryIO, + force_readable: bool = False, + force_writable: bool = False, + ): + self._stream = stream + self._force_readable = force_readable + self._force_writable = force_writable + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._stream, name) + + def read1(self, size: int) -> bytes: + f = getattr(self._stream, "read1", None) + + if f is not None: + return t.cast(bytes, f(size)) + + return self._stream.read(size) + + def readable(self) -> bool: + if self._force_readable: + return True + x = getattr(self._stream, "readable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.read(0) + except Exception: + return False + return True + + def writable(self) -> bool: + if self._force_writable: + return True + x = getattr(self._stream, "writable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.write("") # type: ignore + except Exception: + try: + self._stream.write(b"") + except Exception: + return False + return True + + def seekable(self) -> bool: + x = getattr(self._stream, "seekable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.seek(self._stream.tell()) + except Exception: + return False + return True + + +def _is_binary_reader(stream: t.IO, default: bool = False) -> bool: + try: + return isinstance(stream.read(0), bytes) + except Exception: + return default + # This happens in some cases where the stream was already + # closed. In this case, we assume the default. + + +def _is_binary_writer(stream: t.IO, default: bool = False) -> bool: + try: + stream.write(b"") + except Exception: + try: + stream.write("") + return False + except Exception: + pass + return default + return True + + +def _find_binary_reader(stream: t.IO) -> t.Optional[t.BinaryIO]: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_reader(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_reader(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _find_binary_writer(stream: t.IO) -> t.Optional[t.BinaryIO]: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_writer(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_writer(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _stream_is_misconfigured(stream: t.TextIO) -> bool: + """A stream is misconfigured if its encoding is ASCII.""" + # If the stream does not have an encoding set, we assume it's set + # to ASCII. This appears to happen in certain unittest + # environments. It's not quite clear what the correct behavior is + # but this at least will force Click to recover somehow. 
+ return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") + + +def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool: + """A stream attribute is compatible if it is equal to the + desired value or the desired value is unset and the attribute + has a value. + """ + stream_value = getattr(stream, attr, None) + return stream_value == value or (value is None and stream_value is not None) + + +def _is_compatible_text_stream( + stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> bool: + """Check if a stream's encoding and errors attributes are + compatible with the desired values. + """ + return _is_compat_stream_attr( + stream, "encoding", encoding + ) and _is_compat_stream_attr(stream, "errors", errors) + + +def _force_correct_text_stream( + text_stream: t.IO, + encoding: t.Optional[str], + errors: t.Optional[str], + is_binary: t.Callable[[t.IO, bool], bool], + find_binary: t.Callable[[t.IO], t.Optional[t.BinaryIO]], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if is_binary(text_stream, False): + binary_reader = t.cast(t.BinaryIO, text_stream) + else: + text_stream = t.cast(t.TextIO, text_stream) + # If the stream looks compatible, and won't default to a + # misconfigured ascii encoding, return it as-is. + if _is_compatible_text_stream(text_stream, encoding, errors) and not ( + encoding is None and _stream_is_misconfigured(text_stream) + ): + return text_stream + + # Otherwise, get the underlying binary reader. + possible_binary_reader = find_binary(text_stream) + + # If that's not possible, silently use the original reader + # and get mojibake instead of exceptions. + if possible_binary_reader is None: + return text_stream + + binary_reader = possible_binary_reader + + # Default errors to replace instead of strict in order to get + # something that works. + if errors is None: + errors = "replace" + + # Wrap the binary stream in a text stream with the correct + # encoding parameters. 
+ return _make_text_stream( + binary_reader, + encoding, + errors, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def _force_correct_text_reader( + text_reader: t.IO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_reader, + encoding, + errors, + _is_binary_reader, + _find_binary_reader, + force_readable=force_readable, + ) + + +def _force_correct_text_writer( + text_writer: t.IO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_writable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_writer, + encoding, + errors, + _is_binary_writer, + _find_binary_writer, + force_writable=force_writable, + ) + + +def get_binary_stdin() -> t.BinaryIO: + reader = _find_binary_reader(sys.stdin) + if reader is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdin.") + return reader + + +def get_binary_stdout() -> t.BinaryIO: + writer = _find_binary_writer(sys.stdout) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdout.") + return writer + + +def get_binary_stderr() -> t.BinaryIO: + writer = _find_binary_writer(sys.stderr) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stderr.") + return writer + + +def get_text_stdin( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) + + +def get_text_stdout( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) + + +def get_text_stderr( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) + + +def _wrap_io_open( + file: t.Union[str, os.PathLike, int], + mode: str, + encoding: t.Optional[str], + errors: t.Optional[str], +) -> t.IO: + """Handles not passing ``encoding`` and ``errors`` in binary mode.""" + if "b" in mode: + return open(file, mode) + + return open(file, mode, encoding=encoding, errors=errors) + + +def open_stream( + filename: str, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, +) -> t.Tuple[t.IO, bool]: + binary = "b" in mode + + # Standard streams first. These are simple because they don't need + # special handling for the atomic flag. It's entirely ignored. + if filename == "-": + if any(m in mode for m in ["w", "a", "x"]): + if binary: + return get_binary_stdout(), False + return get_text_stdout(encoding=encoding, errors=errors), False + if binary: + return get_binary_stdin(), False + return get_text_stdin(encoding=encoding, errors=errors), False + + # Non-atomic writes directly go out through the regular open functions. 
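    # NOTE (editorial annotation, not part of upstream Click or of this patch):
    # the atomic branch below writes to a hidden ".__atomic-write*" temporary
    # file in the target directory and moves it over the destination on close
    # via os.replace(), so readers never observe a half-written file. A minimal
    # sketch of how callers reach this through Click's public API:
    #
    #     import click
    #     with click.open_file("out.txt", "w", atomic=True) as f:
    #         f.write("written all-or-nothing\n")
    #
    # Both click.open_file() and the click.File parameter type accept
    # atomic=True and delegate to open_stream().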
+ if not atomic: + return _wrap_io_open(filename, mode, encoding, errors), True + + # Some usability stuff for atomic writes + if "a" in mode: + raise ValueError( + "Appending to an existing file is not supported, because that" + " would involve an expensive `copy`-operation to a temporary" + " file. Open the file in normal `w`-mode and copy explicitly" + " if that's what you're after." + ) + if "x" in mode: + raise ValueError("Use the `overwrite`-parameter instead.") + if "w" not in mode: + raise ValueError("Atomic writes only make sense with `w`-mode.") + + # Atomic writes are more complicated. They work by opening a file + # as a proxy in the same folder and then using the fdopen + # functionality to wrap it in a Python file. Then we wrap it in an + # atomic file that moves the file over on close. + import errno + import random + + try: + perm: t.Optional[int] = os.stat(filename).st_mode + except OSError: + perm = None + + flags = os.O_RDWR | os.O_CREAT | os.O_EXCL + + if binary: + flags |= getattr(os, "O_BINARY", 0) + + while True: + tmp_filename = os.path.join( + os.path.dirname(filename), + f".__atomic-write{random.randrange(1 << 32):08x}", + ) + try: + fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) + break + except OSError as e: + if e.errno == errno.EEXIST or ( + os.name == "nt" + and e.errno == errno.EACCES + and os.path.isdir(e.filename) + and os.access(e.filename, os.W_OK) + ): + continue + raise + + if perm is not None: + os.chmod(tmp_filename, perm) # in case perm includes bits in umask + + f = _wrap_io_open(fd, mode, encoding, errors) + af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) + return t.cast(t.IO, af), True + + +class _AtomicFile: + def __init__(self, f: t.IO, tmp_filename: str, real_filename: str) -> None: + self._f = f + self._tmp_filename = tmp_filename + self._real_filename = real_filename + self.closed = False + + @property + def name(self) -> str: + return self._real_filename + + def close(self, delete: bool = False) -> None: + if self.closed: + return + self._f.close() + os.replace(self._tmp_filename, self._real_filename) + self.closed = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._f, name) + + def __enter__(self) -> "_AtomicFile": + return self + + def __exit__(self, exc_type, exc_value, tb): # type: ignore + self.close(delete=exc_type is not None) + + def __repr__(self) -> str: + return repr(self._f) + + +def strip_ansi(value: str) -> str: + return _ansi_re.sub("", value) + + +def _is_jupyter_kernel_output(stream: t.IO) -> bool: + while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): + stream = stream._stream + + return stream.__class__.__module__.startswith("ipykernel.") + + +def should_strip_ansi( + stream: t.Optional[t.IO] = None, color: t.Optional[bool] = None +) -> bool: + if color is None: + if stream is None: + stream = sys.stdin + return not isatty(stream) and not _is_jupyter_kernel_output(stream) + return not color + + +# On Windows, wrap the output streams with colorama to support ANSI +# color codes. 
+# NOTE: double check is needed so mypy does not analyze this on Linux +if sys.platform.startswith("win") and WIN: + from ._winconsole import _get_windows_console_stream + + def _get_argv_encoding() -> str: + import locale + + return locale.getpreferredencoding() + + _ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def auto_wrap_for_ansi( + stream: t.TextIO, color: t.Optional[bool] = None + ) -> t.TextIO: + """Support ANSI color and style codes on Windows by wrapping a + stream with colorama. + """ + try: + cached = _ansi_stream_wrappers.get(stream) + except Exception: + cached = None + + if cached is not None: + return cached + + import colorama + + strip = should_strip_ansi(stream, color) + ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) + rv = t.cast(t.TextIO, ansi_wrapper.stream) + _write = rv.write + + def _safe_write(s): + try: + return _write(s) + except BaseException: + ansi_wrapper.reset_all() + raise + + rv.write = _safe_write + + try: + _ansi_stream_wrappers[stream] = rv + except Exception: + pass + + return rv + + +else: + + def _get_argv_encoding() -> str: + return getattr(sys.stdin, "encoding", None) or get_filesystem_encoding() + + def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] + ) -> t.Optional[t.TextIO]: + return None + + +def term_len(x: str) -> int: + return len(strip_ansi(x)) + + +def isatty(stream: t.IO) -> bool: + try: + return stream.isatty() + except Exception: + return False + + +def _make_cached_stream_func( + src_func: t.Callable[[], t.TextIO], wrapper_func: t.Callable[[], t.TextIO] +) -> t.Callable[[], t.TextIO]: + cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def func() -> t.TextIO: + stream = src_func() + try: + rv = cache.get(stream) + except Exception: + rv = None + if rv is not None: + return rv + rv = wrapper_func() + try: + cache[stream] = rv + except Exception: + pass + return rv + + return func + + +_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) +_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) +_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) + + +binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = { + "stdin": get_binary_stdin, + "stdout": get_binary_stdout, + "stderr": get_binary_stderr, +} + +text_streams: t.Mapping[ + str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO] +] = { + "stdin": get_text_stdin, + "stdout": get_text_stdout, + "stderr": get_text_stderr, +} diff --git a/venv/lib/python3.8/site-packages/click/_termui_impl.py b/venv/lib/python3.8/site-packages/click/_termui_impl.py new file mode 100644 index 0000000..39c1d08 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/_termui_impl.py @@ -0,0 +1,718 @@ +""" +This module contains implementations for the termui module. To keep the +import time of Click down, some infrequently used functionality is +placed in this module and only imported as needed. 
+""" +import contextlib +import math +import os +import sys +import time +import typing as t +from gettext import gettext as _ + +from ._compat import _default_text_stdout +from ._compat import CYGWIN +from ._compat import get_best_encoding +from ._compat import isatty +from ._compat import open_stream +from ._compat import strip_ansi +from ._compat import term_len +from ._compat import WIN +from .exceptions import ClickException +from .utils import echo + +V = t.TypeVar("V") + +if os.name == "nt": + BEFORE_BAR = "\r" + AFTER_BAR = "\n" +else: + BEFORE_BAR = "\r\033[?25l" + AFTER_BAR = "\033[?25h\n" + + +class ProgressBar(t.Generic[V]): + def __init__( + self, + iterable: t.Optional[t.Iterable[V]], + length: t.Optional[int] = None, + fill_char: str = "#", + empty_char: str = " ", + bar_template: str = "%(bar)s", + info_sep: str = " ", + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + label: t.Optional[str] = None, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, + width: int = 30, + ) -> None: + self.fill_char = fill_char + self.empty_char = empty_char + self.bar_template = bar_template + self.info_sep = info_sep + self.show_eta = show_eta + self.show_percent = show_percent + self.show_pos = show_pos + self.item_show_func = item_show_func + self.label = label or "" + if file is None: + file = _default_text_stdout() + self.file = file + self.color = color + self.update_min_steps = update_min_steps + self._completed_intervals = 0 + self.width = width + self.autowidth = width == 0 + + if length is None: + from operator import length_hint + + length = length_hint(iterable, -1) + + if length == -1: + length = None + if iterable is None: + if length is None: + raise TypeError("iterable or length is required") + iterable = t.cast(t.Iterable[V], range(length)) + self.iter = iter(iterable) + self.length = length + self.pos = 0 + self.avg: t.List[float] = [] + self.start = self.last_eta = time.time() + self.eta_known = False + self.finished = False + self.max_width: t.Optional[int] = None + self.entered = False + self.current_item: t.Optional[V] = None + self.is_hidden = not isatty(self.file) + self._last_line: t.Optional[str] = None + + def __enter__(self) -> "ProgressBar": + self.entered = True + self.render_progress() + return self + + def __exit__(self, exc_type, exc_value, tb): # type: ignore + self.render_finish() + + def __iter__(self) -> t.Iterator[V]: + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + self.render_progress() + return self.generator() + + def __next__(self) -> V: + # Iteration is defined in terms of a generator function, + # returned by iter(self); use that to define next(). This works + # because `self.iter` is an iterable consumed by that generator, + # so it is re-entry safe. Calling `next(self.generator())` + # twice works and does "what you want". 
+ return next(iter(self)) + + def render_finish(self) -> None: + if self.is_hidden: + return + self.file.write(AFTER_BAR) + self.file.flush() + + @property + def pct(self) -> float: + if self.finished: + return 1.0 + return min(self.pos / (float(self.length or 1) or 1), 1.0) + + @property + def time_per_iteration(self) -> float: + if not self.avg: + return 0.0 + return sum(self.avg) / float(len(self.avg)) + + @property + def eta(self) -> float: + if self.length is not None and not self.finished: + return self.time_per_iteration * (self.length - self.pos) + return 0.0 + + def format_eta(self) -> str: + if self.eta_known: + t = int(self.eta) + seconds = t % 60 + t //= 60 + minutes = t % 60 + t //= 60 + hours = t % 24 + t //= 24 + if t > 0: + return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" + else: + return f"{hours:02}:{minutes:02}:{seconds:02}" + return "" + + def format_pos(self) -> str: + pos = str(self.pos) + if self.length is not None: + pos += f"/{self.length}" + return pos + + def format_pct(self) -> str: + return f"{int(self.pct * 100): 4}%"[1:] + + def format_bar(self) -> str: + if self.length is not None: + bar_length = int(self.pct * self.width) + bar = self.fill_char * bar_length + bar += self.empty_char * (self.width - bar_length) + elif self.finished: + bar = self.fill_char * self.width + else: + chars = list(self.empty_char * (self.width or 1)) + if self.time_per_iteration != 0: + chars[ + int( + (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) + * self.width + ) + ] = self.fill_char + bar = "".join(chars) + return bar + + def format_progress_line(self) -> str: + show_percent = self.show_percent + + info_bits = [] + if self.length is not None and show_percent is None: + show_percent = not self.show_pos + + if self.show_pos: + info_bits.append(self.format_pos()) + if show_percent: + info_bits.append(self.format_pct()) + if self.show_eta and self.eta_known and not self.finished: + info_bits.append(self.format_eta()) + if self.item_show_func is not None: + item_info = self.item_show_func(self.current_item) + if item_info is not None: + info_bits.append(item_info) + + return ( + self.bar_template + % { + "label": self.label, + "bar": self.format_bar(), + "info": self.info_sep.join(info_bits), + } + ).rstrip() + + def render_progress(self) -> None: + import shutil + + if self.is_hidden: + # Only output the label as it changes if the output is not a + # TTY. Use file=stderr if you expect to be piping stdout. + if self._last_line != self.label: + self._last_line = self.label + echo(self.label, file=self.file, color=self.color) + + return + + buf = [] + # Update width in case the terminal has been resized + if self.autowidth: + old_width = self.width + self.width = 0 + clutter_length = term_len(self.format_progress_line()) + new_width = max(0, shutil.get_terminal_size().columns - clutter_length) + if new_width < old_width: + buf.append(BEFORE_BAR) + buf.append(" " * self.max_width) # type: ignore + self.max_width = new_width + self.width = new_width + + clear_width = self.width + if self.max_width is not None: + clear_width = self.max_width + + buf.append(BEFORE_BAR) + line = self.format_progress_line() + line_len = term_len(line) + if self.max_width is None or self.max_width < line_len: + self.max_width = line_len + + buf.append(line) + buf.append(" " * (clear_width - line_len)) + line = "".join(buf) + # Render the line only if it changed. 
+ + if line != self._last_line: + self._last_line = line + echo(line, file=self.file, color=self.color, nl=False) + self.file.flush() + + def make_step(self, n_steps: int) -> None: + self.pos += n_steps + if self.length is not None and self.pos >= self.length: + self.finished = True + + if (time.time() - self.last_eta) < 1.0: + return + + self.last_eta = time.time() + + # self.avg is a rolling list of length <= 7 of steps where steps are + # defined as time elapsed divided by the total progress through + # self.length. + if self.pos: + step = (time.time() - self.start) / self.pos + else: + step = time.time() - self.start + + self.avg = self.avg[-6:] + [step] + + self.eta_known = self.length is not None + + def update(self, n_steps: int, current_item: t.Optional[V] = None) -> None: + """Update the progress bar by advancing a specified number of + steps, and optionally set the ``current_item`` for this new + position. + + :param n_steps: Number of steps to advance. + :param current_item: Optional item to set as ``current_item`` + for the updated position. + + .. versionchanged:: 8.0 + Added the ``current_item`` optional parameter. + + .. versionchanged:: 8.0 + Only render when the number of steps meets the + ``update_min_steps`` threshold. + """ + if current_item is not None: + self.current_item = current_item + + self._completed_intervals += n_steps + + if self._completed_intervals >= self.update_min_steps: + self.make_step(self._completed_intervals) + self.render_progress() + self._completed_intervals = 0 + + def finish(self) -> None: + self.eta_known = False + self.current_item = None + self.finished = True + + def generator(self) -> t.Iterator[V]: + """Return a generator which yields the items added to the bar + during construction, and updates the progress bar *after* the + yielded block returns. + """ + # WARNING: the iterator interface for `ProgressBar` relies on + # this and only works because this is a simple generator which + # doesn't create or manage additional state. If this function + # changes, the impact should be evaluated both against + # `iter(bar)` and `next(bar)`. `next()` in particular may call + # `self.generator()` repeatedly, and this must remain safe in + # order for that interface to work. + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + + if self.is_hidden: + yield from self.iter + else: + for rv in self.iter: + self.current_item = rv + + # This allows show_item_func to be updated before the + # item is processed. Only trigger at the beginning of + # the update interval. 
+ if self._completed_intervals == 0: + self.render_progress() + + yield rv + self.update(1) + + self.finish() + self.render_progress() + + +def pager(generator: t.Iterable[str], color: t.Optional[bool] = None) -> None: + """Decide what method to use for paging through text.""" + stdout = _default_text_stdout() + if not isatty(sys.stdin) or not isatty(stdout): + return _nullpager(stdout, generator, color) + pager_cmd = (os.environ.get("PAGER", None) or "").strip() + if pager_cmd: + if WIN: + return _tempfilepager(generator, pager_cmd, color) + return _pipepager(generator, pager_cmd, color) + if os.environ.get("TERM") in ("dumb", "emacs"): + return _nullpager(stdout, generator, color) + if WIN or sys.platform.startswith("os2"): + return _tempfilepager(generator, "more <", color) + if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0: + return _pipepager(generator, "less", color) + + import tempfile + + fd, filename = tempfile.mkstemp() + os.close(fd) + try: + if hasattr(os, "system") and os.system(f'more "{filename}"') == 0: + return _pipepager(generator, "more", color) + return _nullpager(stdout, generator, color) + finally: + os.unlink(filename) + + +def _pipepager(generator: t.Iterable[str], cmd: str, color: t.Optional[bool]) -> None: + """Page through text by feeding it to another program. Invoking a + pager through this might support colors. + """ + import subprocess + + env = dict(os.environ) + + # If we're piping to less we might support colors under the + # condition that + cmd_detail = cmd.rsplit("/", 1)[-1].split() + if color is None and cmd_detail[0] == "less": + less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}" + if not less_flags: + env["LESS"] = "-R" + color = True + elif "r" in less_flags or "R" in less_flags: + color = True + + c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) + stdin = t.cast(t.BinaryIO, c.stdin) + encoding = get_best_encoding(stdin) + try: + for text in generator: + if not color: + text = strip_ansi(text) + + stdin.write(text.encode(encoding, "replace")) + except (OSError, KeyboardInterrupt): + pass + else: + stdin.close() + + # Less doesn't respect ^C, but catches it for its own UI purposes (aborting + # search or other commands inside less). + # + # That means when the user hits ^C, the parent process (click) terminates, + # but less is still alive, paging the output and messing up the terminal. + # + # If the user wants to make the pager exit on ^C, they should set + # `LESS='-K'`. It's not our decision to make. + while True: + try: + c.wait() + except KeyboardInterrupt: + pass + else: + break + + +def _tempfilepager( + generator: t.Iterable[str], cmd: str, color: t.Optional[bool] +) -> None: + """Page through text by invoking a program on a temporary file.""" + import tempfile + + fd, filename = tempfile.mkstemp() + # TODO: This never terminates if the passed generator never terminates. + text = "".join(generator) + if not color: + text = strip_ansi(text) + encoding = get_best_encoding(sys.stdout) + with open_stream(filename, "wb")[0] as f: + f.write(text.encode(encoding)) + try: + os.system(f'{cmd} "{filename}"') + finally: + os.close(fd) + os.unlink(filename) + + +def _nullpager( + stream: t.TextIO, generator: t.Iterable[str], color: t.Optional[bool] +) -> None: + """Simply print unformatted text. 
This is the ultimate fallback.""" + for text in generator: + if not color: + text = strip_ansi(text) + stream.write(text) + + +class Editor: + def __init__( + self, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + ) -> None: + self.editor = editor + self.env = env + self.require_save = require_save + self.extension = extension + + def get_editor(self) -> str: + if self.editor is not None: + return self.editor + for key in "VISUAL", "EDITOR": + rv = os.environ.get(key) + if rv: + return rv + if WIN: + return "notepad" + for editor in "sensible-editor", "vim", "nano": + if os.system(f"which {editor} >/dev/null 2>&1") == 0: + return editor + return "vi" + + def edit_file(self, filename: str) -> None: + import subprocess + + editor = self.get_editor() + environ: t.Optional[t.Dict[str, str]] = None + + if self.env: + environ = os.environ.copy() + environ.update(self.env) + + try: + c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True) + exit_code = c.wait() + if exit_code != 0: + raise ClickException( + _("{editor}: Editing failed").format(editor=editor) + ) + except OSError as e: + raise ClickException( + _("{editor}: Editing failed: {e}").format(editor=editor, e=e) + ) from e + + def edit(self, text: t.Optional[t.AnyStr]) -> t.Optional[t.AnyStr]: + import tempfile + + if not text: + data = b"" + elif isinstance(text, (bytes, bytearray)): + data = text + else: + if text and not text.endswith("\n"): + text += "\n" + + if WIN: + data = text.replace("\n", "\r\n").encode("utf-8-sig") + else: + data = text.encode("utf-8") + + fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) + f: t.BinaryIO + + try: + with os.fdopen(fd, "wb") as f: + f.write(data) + + # If the filesystem resolution is 1 second, like Mac OS + # 10.12 Extended, or 2 seconds, like FAT32, and the editor + # closes very fast, require_save can fail. Set the modified + # time to be 2 seconds in the past to work around this. + os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) + # Depending on the resolution, the exact value might not be + # recorded, so get the new recorded value. 
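            # NOTE (editorial annotation, not part of upstream Click or of
            # this patch): this Editor class backs the public click.edit()
            # helper. A minimal usage sketch:
            #
            #     import click
            #     text = click.edit("# Describe your change\n")
            #     if text is None:
            #         pass  # editor closed without saving (require_save=True)
            #
            # click.edit() returns None when require_save is set and the
            # file's mtime is unchanged, which is exactly what the timestamp
            # comparison below detects.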
+ timestamp = os.path.getmtime(name) + + self.edit_file(name) + + if self.require_save and os.path.getmtime(name) == timestamp: + return None + + with open(name, "rb") as f: + rv = f.read() + + if isinstance(text, (bytes, bytearray)): + return rv + + return rv.decode("utf-8-sig").replace("\r\n", "\n") # type: ignore + finally: + os.unlink(name) + + +def open_url(url: str, wait: bool = False, locate: bool = False) -> int: + import subprocess + + def _unquote_file(url: str) -> str: + from urllib.parse import unquote + + if url.startswith("file://"): + url = unquote(url[7:]) + + return url + + if sys.platform == "darwin": + args = ["open"] + if wait: + args.append("-W") + if locate: + args.append("-R") + args.append(_unquote_file(url)) + null = open("/dev/null", "w") + try: + return subprocess.Popen(args, stderr=null).wait() + finally: + null.close() + elif WIN: + if locate: + url = _unquote_file(url.replace('"', "")) + args = f'explorer /select,"{url}"' + else: + url = url.replace('"', "") + wait_str = "/WAIT" if wait else "" + args = f'start {wait_str} "" "{url}"' + return os.system(args) + elif CYGWIN: + if locate: + url = os.path.dirname(_unquote_file(url).replace('"', "")) + args = f'cygstart "{url}"' + else: + url = url.replace('"', "") + wait_str = "-w" if wait else "" + args = f'cygstart {wait_str} "{url}"' + return os.system(args) + + try: + if locate: + url = os.path.dirname(_unquote_file(url)) or "." + else: + url = _unquote_file(url) + c = subprocess.Popen(["xdg-open", url]) + if wait: + return c.wait() + return 0 + except OSError: + if url.startswith(("http://", "https://")) and not locate and not wait: + import webbrowser + + webbrowser.open(url) + return 0 + return 1 + + +def _translate_ch_to_exc(ch: str) -> t.Optional[BaseException]: + if ch == "\x03": + raise KeyboardInterrupt() + + if ch == "\x04" and not WIN: # Unix-like, Ctrl+D + raise EOFError() + + if ch == "\x1a" and WIN: # Windows, Ctrl+Z + raise EOFError() + + return None + + +if WIN: + import msvcrt + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + yield -1 + + def getchar(echo: bool) -> str: + # The function `getch` will return a bytes object corresponding to + # the pressed character. Since Windows 10 build 1803, it will also + # return \x00 when called a second time after pressing a regular key. + # + # `getwch` does not share this probably-bugged behavior. Moreover, it + # returns a Unicode object by default, which is what we want. + # + # Either of these functions will return \x00 or \xe0 to indicate + # a special key, and you need to call the same function again to get + # the "rest" of the code. The fun part is that \u00e0 is + # "latin small letter a with grave", so if you type that on a French + # keyboard, you _also_ get a \xe0. + # E.g., consider the Up arrow. This returns \xe0 and then \x48. The + # resulting Unicode string reads as "a with grave" + "capital H". + # This is indistinguishable from when the user actually types + # "a with grave" and then "capital H". + # + # When \xe0 is returned, we assume it's part of a special-key sequence + # and call `getwch` again, but that means that when the user types + # the \u00e0 character, `getchar` doesn't return until a second + # character is typed. + # The alternative is returning immediately, but that would mess up + # cross-platform handling of arrow keys and others that start with + # \xe0. 
Another option is using `getch`, but then we can't reliably + # read non-ASCII characters, because return values of `getch` are + # limited to the current 8-bit codepage. + # + # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` + # is doing the right thing in more situations than with `getch`. + func: t.Callable[[], str] + + if echo: + func = msvcrt.getwche # type: ignore + else: + func = msvcrt.getwch # type: ignore + + rv = func() + + if rv in ("\x00", "\xe0"): + # \x00 and \xe0 are control characters that indicate special key, + # see above. + rv += func() + + _translate_ch_to_exc(rv) + return rv + + +else: + import tty + import termios + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + f: t.Optional[t.TextIO] + fd: int + + if not isatty(sys.stdin): + f = open("/dev/tty") + fd = f.fileno() + else: + fd = sys.stdin.fileno() + f = None + + try: + old_settings = termios.tcgetattr(fd) + + try: + tty.setraw(fd) + yield fd + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + sys.stdout.flush() + + if f is not None: + f.close() + except termios.error: + pass + + def getchar(echo: bool) -> str: + with raw_terminal() as fd: + ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") + + if echo and isatty(sys.stdout): + sys.stdout.write(ch) + + _translate_ch_to_exc(ch) + return ch diff --git a/venv/lib/python3.8/site-packages/click/_textwrap.py b/venv/lib/python3.8/site-packages/click/_textwrap.py new file mode 100644 index 0000000..b47dcbd --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/_textwrap.py @@ -0,0 +1,49 @@ +import textwrap +import typing as t +from contextlib import contextmanager + + +class TextWrapper(textwrap.TextWrapper): + def _handle_long_word( + self, + reversed_chunks: t.List[str], + cur_line: t.List[str], + cur_len: int, + width: int, + ) -> None: + space_left = max(width - cur_len, 1) + + if self.break_long_words: + last = reversed_chunks[-1] + cut = last[:space_left] + res = last[space_left:] + cur_line.append(cut) + reversed_chunks[-1] = res + elif not cur_line: + cur_line.append(reversed_chunks.pop()) + + @contextmanager + def extra_indent(self, indent: str) -> t.Iterator[None]: + old_initial_indent = self.initial_indent + old_subsequent_indent = self.subsequent_indent + self.initial_indent += indent + self.subsequent_indent += indent + + try: + yield + finally: + self.initial_indent = old_initial_indent + self.subsequent_indent = old_subsequent_indent + + def indent_only(self, text: str) -> str: + rv = [] + + for idx, line in enumerate(text.splitlines()): + indent = self.initial_indent + + if idx > 0: + indent = self.subsequent_indent + + rv.append(f"{indent}{line}") + + return "\n".join(rv) diff --git a/venv/lib/python3.8/site-packages/click/_unicodefun.py b/venv/lib/python3.8/site-packages/click/_unicodefun.py new file mode 100644 index 0000000..9cb30c3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/_unicodefun.py @@ -0,0 +1,100 @@ +import codecs +import os +from gettext import gettext as _ + + +def _verify_python_env() -> None: + """Ensures that the environment is good for Unicode.""" + try: + from locale import getpreferredencoding + + fs_enc = codecs.lookup(getpreferredencoding()).name + except Exception: + fs_enc = "ascii" + + if fs_enc != "ascii": + return + + extra = [ + _( + "Click will abort further execution because Python was" + " configured to use ASCII as encoding for the environment." 
+ " Consult https://click.palletsprojects.com/unicode-support/" + " for mitigation steps." + ) + ] + + if os.name == "posix": + import subprocess + + try: + rv = subprocess.Popen( + ["locale", "-a"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + encoding="ascii", + errors="replace", + ).communicate()[0] + except OSError: + rv = "" + + good_locales = set() + has_c_utf8 = False + + for line in rv.splitlines(): + locale = line.strip() + + if locale.lower().endswith((".utf-8", ".utf8")): + good_locales.add(locale) + + if locale.lower() in ("c.utf8", "c.utf-8"): + has_c_utf8 = True + + if not good_locales: + extra.append( + _( + "Additional information: on this system no suitable" + " UTF-8 locales were discovered. This most likely" + " requires resolving by reconfiguring the locale" + " system." + ) + ) + elif has_c_utf8: + extra.append( + _( + "This system supports the C.UTF-8 locale which is" + " recommended. You might be able to resolve your" + " issue by exporting the following environment" + " variables:" + ) + ) + extra.append(" export LC_ALL=C.UTF-8\n export LANG=C.UTF-8") + else: + extra.append( + _( + "This system lists some UTF-8 supporting locales" + " that you can pick from. The following suitable" + " locales were discovered: {locales}" + ).format(locales=", ".join(sorted(good_locales))) + ) + + bad_locale = None + + for env_locale in os.environ.get("LC_ALL"), os.environ.get("LANG"): + if env_locale and env_locale.lower().endswith((".utf-8", ".utf8")): + bad_locale = env_locale + + if env_locale is not None: + break + + if bad_locale is not None: + extra.append( + _( + "Click discovered that you exported a UTF-8 locale" + " but the locale system could not pick up from it" + " because it does not exist. The exported locale is" + " {locale!r} but it is not supported." + ).format(locale=bad_locale) + ) + + raise RuntimeError("\n\n".join(extra)) diff --git a/venv/lib/python3.8/site-packages/click/_winconsole.py b/venv/lib/python3.8/site-packages/click/_winconsole.py new file mode 100644 index 0000000..6b20df3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/_winconsole.py @@ -0,0 +1,279 @@ +# This module is based on the excellent work by Adam Bartoš who +# provided a lot of what went into the implementation here in +# the discussion to issue1602 in the Python bug tracker. +# +# There are some general differences in regards to how this works +# compared to the original patches as we do not need to patch +# the entire interpreter but just work in our little world of +# echo and prompt. 
+import io +import sys +import time +import typing as t +from ctypes import byref +from ctypes import c_char +from ctypes import c_char_p +from ctypes import c_int +from ctypes import c_ssize_t +from ctypes import c_ulong +from ctypes import c_void_p +from ctypes import POINTER +from ctypes import py_object +from ctypes import Structure +from ctypes.wintypes import DWORD +from ctypes.wintypes import HANDLE +from ctypes.wintypes import LPCWSTR +from ctypes.wintypes import LPWSTR + +from ._compat import _NonClosingTextIOWrapper + +assert sys.platform == "win32" +import msvcrt # noqa: E402 +from ctypes import windll # noqa: E402 +from ctypes import WINFUNCTYPE # noqa: E402 + +c_ssize_p = POINTER(c_ssize_t) + +kernel32 = windll.kernel32 +GetStdHandle = kernel32.GetStdHandle +ReadConsoleW = kernel32.ReadConsoleW +WriteConsoleW = kernel32.WriteConsoleW +GetConsoleMode = kernel32.GetConsoleMode +GetLastError = kernel32.GetLastError +GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) +CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ("CommandLineToArgvW", windll.shell32) +) +LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) + +STDIN_HANDLE = GetStdHandle(-10) +STDOUT_HANDLE = GetStdHandle(-11) +STDERR_HANDLE = GetStdHandle(-12) + +PyBUF_SIMPLE = 0 +PyBUF_WRITABLE = 1 + +ERROR_SUCCESS = 0 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_OPERATION_ABORTED = 995 + +STDIN_FILENO = 0 +STDOUT_FILENO = 1 +STDERR_FILENO = 2 + +EOF = b"\x1a" +MAX_BYTES_WRITTEN = 32767 + +try: + from ctypes import pythonapi +except ImportError: + # On PyPy we cannot get buffers so our ability to operate here is + # severely limited. + get_buffer = None +else: + + class Py_buffer(Structure): + _fields_ = [ + ("buf", c_void_p), + ("obj", py_object), + ("len", c_ssize_t), + ("itemsize", c_ssize_t), + ("readonly", c_int), + ("ndim", c_int), + ("format", c_char_p), + ("shape", c_ssize_p), + ("strides", c_ssize_p), + ("suboffsets", c_ssize_p), + ("internal", c_void_p), + ] + + PyObject_GetBuffer = pythonapi.PyObject_GetBuffer + PyBuffer_Release = pythonapi.PyBuffer_Release + + def get_buffer(obj, writable=False): + buf = Py_buffer() + flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE + PyObject_GetBuffer(py_object(obj), byref(buf), flags) + + try: + buffer_type = c_char * buf.len + return buffer_type.from_address(buf.buf) + finally: + PyBuffer_Release(byref(buf)) + + +class _WindowsConsoleRawIOBase(io.RawIOBase): + def __init__(self, handle): + self.handle = handle + + def isatty(self): + super().isatty() + return True + + +class _WindowsConsoleReader(_WindowsConsoleRawIOBase): + def readable(self): + return True + + def readinto(self, b): + bytes_to_be_read = len(b) + if not bytes_to_be_read: + return 0 + elif bytes_to_be_read % 2: + raise ValueError( + "cannot read odd number of bytes from UTF-16-LE encoded console" + ) + + buffer = get_buffer(b, writable=True) + code_units_to_be_read = bytes_to_be_read // 2 + code_units_read = c_ulong() + + rv = ReadConsoleW( + HANDLE(self.handle), + buffer, + code_units_to_be_read, + byref(code_units_read), + None, + ) + if GetLastError() == ERROR_OPERATION_ABORTED: + # wait for KeyboardInterrupt + time.sleep(0.1) + if not rv: + raise OSError(f"Windows error: {GetLastError()}") + + if buffer[0] == EOF: + return 0 + return 2 * code_units_read.value + + +class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): + def writable(self): + return True + + @staticmethod + def _get_error_message(errno): + if errno == ERROR_SUCCESS: + 
return "ERROR_SUCCESS" + elif errno == ERROR_NOT_ENOUGH_MEMORY: + return "ERROR_NOT_ENOUGH_MEMORY" + return f"Windows error {errno}" + + def write(self, b): + bytes_to_be_written = len(b) + buf = get_buffer(b) + code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 + code_units_written = c_ulong() + + WriteConsoleW( + HANDLE(self.handle), + buf, + code_units_to_be_written, + byref(code_units_written), + None, + ) + bytes_written = 2 * code_units_written.value + + if bytes_written == 0 and bytes_to_be_written > 0: + raise OSError(self._get_error_message(GetLastError())) + return bytes_written + + +class ConsoleStream: + def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: + self._text_stream = text_stream + self.buffer = byte_stream + + @property + def name(self) -> str: + return self.buffer.name + + def write(self, x: t.AnyStr) -> int: + if isinstance(x, str): + return self._text_stream.write(x) + try: + self.flush() + except Exception: + pass + return self.buffer.write(x) + + def writelines(self, lines: t.Iterable[t.AnyStr]) -> None: + for line in lines: + self.write(line) + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._text_stream, name) + + def isatty(self) -> bool: + return self.buffer.isatty() + + def __repr__(self): + return f"" + + +def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { + 0: _get_text_stdin, + 1: _get_text_stdout, + 2: _get_text_stderr, +} + + +def _is_console(f: t.TextIO) -> bool: + if not hasattr(f, "fileno"): + return False + + try: + fileno = f.fileno() + except (OSError, io.UnsupportedOperation): + return False + + handle = msvcrt.get_osfhandle(fileno) + return bool(GetConsoleMode(handle, byref(DWORD()))) + + +def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> t.Optional[t.TextIO]: + if ( + get_buffer is not None + and encoding in {"utf-16-le", None} + and errors in {"strict", None} + and _is_console(f) + ): + func = _stream_factories.get(f.fileno()) + if func is not None: + b = getattr(f, "buffer", None) + + if b is None: + return None + + return func(b) diff --git a/venv/lib/python3.8/site-packages/click/core.py b/venv/lib/python3.8/site-packages/click/core.py new file mode 100644 index 0000000..f226354 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/core.py @@ -0,0 +1,2953 @@ +import enum +import errno +import os +import sys +import typing +import typing as t +from collections import abc +from contextlib import contextmanager +from contextlib import ExitStack +from functools import partial +from functools import update_wrapper +from gettext import gettext as 
_ +from gettext import ngettext +from itertools import repeat + +from . import types +from ._unicodefun import _verify_python_env +from .exceptions import Abort +from .exceptions import BadParameter +from .exceptions import ClickException +from .exceptions import Exit +from .exceptions import MissingParameter +from .exceptions import UsageError +from .formatting import HelpFormatter +from .formatting import join_options +from .globals import pop_context +from .globals import push_context +from .parser import _flag_needs_value +from .parser import OptionParser +from .parser import split_opt +from .termui import confirm +from .termui import prompt +from .termui import style +from .utils import _detect_program_name +from .utils import _expand_args +from .utils import echo +from .utils import make_default_short_help +from .utils import make_str +from .utils import PacifyFlushWrapper + +if t.TYPE_CHECKING: + import typing_extensions as te + from .shell_completion import CompletionItem + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) +V = t.TypeVar("V") + + +def _complete_visible_commands( + ctx: "Context", incomplete: str +) -> t.Iterator[t.Tuple[str, "Command"]]: + """List all the subcommands of a group that start with the + incomplete value and aren't hidden. + + :param ctx: Invocation context for the group. + :param incomplete: Value being completed. May be empty. + """ + multi = t.cast(MultiCommand, ctx.command) + + for name in multi.list_commands(ctx): + if name.startswith(incomplete): + command = multi.get_command(ctx, name) + + if command is not None and not command.hidden: + yield name, command + + +def _check_multicommand( + base_command: "MultiCommand", cmd_name: str, cmd: "Command", register: bool = False +) -> None: + if not base_command.chain or not isinstance(cmd, MultiCommand): + return + if register: + hint = ( + "It is not possible to add multi commands as children to" + " another multi command that is in chain mode." + ) + else: + hint = ( + "Found a multi command as subcommand to a multi command" + " that is in chain mode. This is not supported." + ) + raise RuntimeError( + f"{hint}. Command {base_command.name!r} is set to chain and" + f" {cmd_name!r} was added as a subcommand but it in itself is a" + f" multi command. ({cmd_name!r} is a {type(cmd).__name__}" + f" within a chained {type(base_command).__name__} named" + f" {base_command.name!r})." + ) + + +def batch(iterable: t.Iterable[V], batch_size: int) -> t.List[t.Tuple[V, ...]]: + return list(zip(*repeat(iter(iterable), batch_size))) + + +@contextmanager +def augment_usage_errors( + ctx: "Context", param: t.Optional["Parameter"] = None +) -> t.Iterator[None]: + """Context manager that attaches extra information to exceptions.""" + try: + yield + except BadParameter as e: + if e.ctx is None: + e.ctx = ctx + if param is not None and e.param is None: + e.param = param + raise + except UsageError as e: + if e.ctx is None: + e.ctx = ctx + raise + + +def iter_params_for_processing( + invocation_order: t.Sequence["Parameter"], + declaration_order: t.Sequence["Parameter"], +) -> t.List["Parameter"]: + """Given a sequence of parameters in the order as should be considered + for processing and an iterable of parameters that exist, this returns + a list in the correct order as they should be processed. 
+ """ + + def sort_key(item: "Parameter") -> t.Tuple[bool, float]: + try: + idx: float = invocation_order.index(item) + except ValueError: + idx = float("inf") + + return not item.is_eager, idx + + return sorted(declaration_order, key=sort_key) + + +class ParameterSource(enum.Enum): + """This is an :class:`~enum.Enum` that indicates the source of a + parameter's value. + + Use :meth:`click.Context.get_parameter_source` to get the + source for a parameter by name. + + .. versionchanged:: 8.0 + Use :class:`~enum.Enum` and drop the ``validate`` method. + + .. versionchanged:: 8.0 + Added the ``PROMPT`` value. + """ + + COMMANDLINE = enum.auto() + """The value was provided by the command line args.""" + ENVIRONMENT = enum.auto() + """The value was provided with an environment variable.""" + DEFAULT = enum.auto() + """Used the default specified by the parameter.""" + DEFAULT_MAP = enum.auto() + """Used a default provided by :attr:`Context.default_map`.""" + PROMPT = enum.auto() + """Used a prompt to confirm a default or provide a value.""" + + +class Context: + """The context is a special internal object that holds state relevant + for the script execution at every single level. It's normally invisible + to commands unless they opt-in to getting access to it. + + The context is useful as it can pass internal objects around and can + control special execution features such as reading data from + environment variables. + + A context can be used as context manager in which case it will call + :meth:`close` on teardown. + + :param command: the command class for this context. + :param parent: the parent context. + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it is usually + the name of the script, for commands below it it's + the name of the script. + :param obj: an arbitrary object of user data. + :param auto_envvar_prefix: the prefix to use for automatic environment + variables. If this is `None` then reading + from environment variables is disabled. This + does not affect manually set environment + variables which are always read. + :param default_map: a dictionary (like object) with default values + for parameters. + :param terminal_width: the width of the terminal. The default is + inherit from parent context. If no context + defines the terminal width then auto + detection will be applied. + :param max_content_width: the maximum width for content rendered by + Click (this currently only affects help + pages). This defaults to 80 characters if + not overridden. In other words: even if the + terminal is larger than that, Click will not + format things wider than 80 characters by + default. In addition to that, formatters might + add some safety mapping on the right. + :param resilient_parsing: if this flag is enabled then Click will + parse without any interactivity or callback + invocation. Default values will also be + ignored. This is useful for implementing + things such as completion support. + :param allow_extra_args: if this is set to `True` then extra arguments + at the end will not raise an error and will be + kept on the context. The default is to inherit + from the command. + :param allow_interspersed_args: if this is set to `False` then options + and arguments cannot be mixed. The + default is to inherit from the command. + :param ignore_unknown_options: instructs click to ignore options it does + not know and keeps them for later + processing. 
+ :param help_option_names: optionally a list of strings that define how + the default help parameter is named. The + default is ``['--help']``. + :param token_normalize_func: an optional function that is used to + normalize tokens (options, choices, + etc.). This for instance can be used to + implement case insensitive behavior. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are used in texts that Click prints which is by + default not the case. This for instance would affect + help output. + :param show_default: Show defaults for all options. If not set, + defaults to the value from a parent context. Overrides an + option's ``show_default`` argument. + + .. versionchanged:: 8.0 + The ``show_default`` parameter defaults to the value from the + parent context. + + .. versionchanged:: 7.1 + Added the ``show_default`` parameter. + + .. versionchanged:: 4.0 + Added the ``color``, ``ignore_unknown_options``, and + ``max_content_width`` parameters. + + .. versionchanged:: 3.0 + Added the ``allow_extra_args`` and ``allow_interspersed_args`` + parameters. + + .. versionchanged:: 2.0 + Added the ``resilient_parsing``, ``help_option_names``, and + ``token_normalize_func`` parameters. + """ + + #: The formatter class to create with :meth:`make_formatter`. + #: + #: .. versionadded:: 8.0 + formatter_class: t.Type["HelpFormatter"] = HelpFormatter + + def __init__( + self, + command: "Command", + parent: t.Optional["Context"] = None, + info_name: t.Optional[str] = None, + obj: t.Optional[t.Any] = None, + auto_envvar_prefix: t.Optional[str] = None, + default_map: t.Optional[t.Dict[str, t.Any]] = None, + terminal_width: t.Optional[int] = None, + max_content_width: t.Optional[int] = None, + resilient_parsing: bool = False, + allow_extra_args: t.Optional[bool] = None, + allow_interspersed_args: t.Optional[bool] = None, + ignore_unknown_options: t.Optional[bool] = None, + help_option_names: t.Optional[t.List[str]] = None, + token_normalize_func: t.Optional[t.Callable[[str], str]] = None, + color: t.Optional[bool] = None, + show_default: t.Optional[bool] = None, + ) -> None: + #: the parent context or `None` if none exists. + self.parent = parent + #: the :class:`Command` for this context. + self.command = command + #: the descriptive information name + self.info_name = info_name + #: Map of parameter names to their parsed values. Parameters + #: with ``expose_value=False`` are not stored. + self.params: t.Dict[str, t.Any] = {} + #: the leftover arguments. + self.args: t.List[str] = [] + #: protected arguments. These are arguments that are prepended + #: to `args` when certain parsing scenarios are encountered but + #: must be never propagated to another arguments. This is used + #: to implement nested parsing. + self.protected_args: t.List[str] = [] + + if obj is None and parent is not None: + obj = parent.obj + + #: the user object stored. + self.obj: t.Any = obj + self._meta: t.Dict[str, t.Any] = getattr(parent, "meta", {}) + + #: A dictionary (-like object) with defaults for parameters. + if ( + default_map is None + and info_name is not None + and parent is not None + and parent.default_map is not None + ): + default_map = parent.default_map.get(info_name) + + self.default_map: t.Optional[t.Dict[str, t.Any]] = default_map + + #: This flag indicates if a subcommand is going to be executed. 
A + #: group callback can use this information to figure out if it's + #: being executed directly or because the execution flow passes + #: onwards to a subcommand. By default it's None, but it can be + #: the name of the subcommand to execute. + #: + #: If chaining is enabled this will be set to ``'*'`` in case + #: any commands are executed. It is however not possible to + #: figure out which ones. If you require this knowledge you + #: should use a :func:`result_callback`. + self.invoked_subcommand: t.Optional[str] = None + + if terminal_width is None and parent is not None: + terminal_width = parent.terminal_width + + #: The width of the terminal (None is autodetection). + self.terminal_width: t.Optional[int] = terminal_width + + if max_content_width is None and parent is not None: + max_content_width = parent.max_content_width + + #: The maximum width of formatted content (None implies a sensible + #: default which is 80 for most things). + self.max_content_width: t.Optional[int] = max_content_width + + if allow_extra_args is None: + allow_extra_args = command.allow_extra_args + + #: Indicates if the context allows extra args or if it should + #: fail on parsing. + #: + #: .. versionadded:: 3.0 + self.allow_extra_args = allow_extra_args + + if allow_interspersed_args is None: + allow_interspersed_args = command.allow_interspersed_args + + #: Indicates if the context allows mixing of arguments and + #: options or not. + #: + #: .. versionadded:: 3.0 + self.allow_interspersed_args: bool = allow_interspersed_args + + if ignore_unknown_options is None: + ignore_unknown_options = command.ignore_unknown_options + + #: Instructs click to ignore options that a command does not + #: understand and will store it on the context for later + #: processing. This is primarily useful for situations where you + #: want to call into external programs. Generally this pattern is + #: strongly discouraged because it's not possibly to losslessly + #: forward all arguments. + #: + #: .. versionadded:: 4.0 + self.ignore_unknown_options: bool = ignore_unknown_options + + if help_option_names is None: + if parent is not None: + help_option_names = parent.help_option_names + else: + help_option_names = ["--help"] + + #: The names for the help options. + self.help_option_names: t.List[str] = help_option_names + + if token_normalize_func is None and parent is not None: + token_normalize_func = parent.token_normalize_func + + #: An optional normalization function for tokens. This is + #: options, choices, commands etc. + self.token_normalize_func: t.Optional[ + t.Callable[[str], str] + ] = token_normalize_func + + #: Indicates if resilient parsing is enabled. In that case Click + #: will do its best to not cause any failures and default values + #: will be ignored. Useful for completion. + self.resilient_parsing: bool = resilient_parsing + + # If there is no envvar prefix yet, but the parent has one and + # the command on this level has a name, we can expand the envvar + # prefix automatically. 
+ if auto_envvar_prefix is None: + if ( + parent is not None + and parent.auto_envvar_prefix is not None + and self.info_name is not None + ): + auto_envvar_prefix = ( + f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" + ) + else: + auto_envvar_prefix = auto_envvar_prefix.upper() + + if auto_envvar_prefix is not None: + auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") + + self.auto_envvar_prefix: t.Optional[str] = auto_envvar_prefix + + if color is None and parent is not None: + color = parent.color + + #: Controls if styling output is wanted or not. + self.color: t.Optional[bool] = color + + if show_default is None and parent is not None: + show_default = parent.show_default + + #: Show option default values when formatting help text. + self.show_default: t.Optional[bool] = show_default + + self._close_callbacks: t.List[t.Callable[[], t.Any]] = [] + self._depth = 0 + self._parameter_source: t.Dict[str, ParameterSource] = {} + self._exit_stack = ExitStack() + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire CLI + structure. + + .. code-block:: python + + with Context(cli) as ctx: + info = ctx.to_info_dict() + + .. versionadded:: 8.0 + """ + return { + "command": self.command.to_info_dict(self), + "info_name": self.info_name, + "allow_extra_args": self.allow_extra_args, + "allow_interspersed_args": self.allow_interspersed_args, + "ignore_unknown_options": self.ignore_unknown_options, + "auto_envvar_prefix": self.auto_envvar_prefix, + } + + def __enter__(self) -> "Context": + self._depth += 1 + push_context(self) + return self + + def __exit__(self, exc_type, exc_value, tb): # type: ignore + self._depth -= 1 + if self._depth == 0: + self.close() + pop_context() + + @contextmanager + def scope(self, cleanup: bool = True) -> t.Iterator["Context"]: + """This helper method can be used with the context object to promote + it to the current thread local (see :func:`get_current_context`). + The default behavior of this is to invoke the cleanup functions which + can be disabled by setting `cleanup` to `False`. The cleanup + functions are typically used for things such as closing file handles. + + If the cleanup is intended the context object can also be directly + used as a context manager. + + Example usage:: + + with ctx.scope(): + assert get_current_context() is ctx + + This is equivalent:: + + with ctx: + assert get_current_context() is ctx + + .. versionadded:: 5.0 + + :param cleanup: controls if the cleanup functions should be run or + not. The default is to run these functions. In + some situations the context only wants to be + temporarily pushed in which case this can be disabled. + Nested pushes automatically defer the cleanup. + """ + if not cleanup: + self._depth += 1 + try: + with self as rv: + yield rv + finally: + if not cleanup: + self._depth -= 1 + + @property + def meta(self) -> t.Dict[str, t.Any]: + """This is a dictionary which is shared with all the contexts + that are nested. It exists so that click utilities can store some + state here if they need to. It is however the responsibility of + that code to manage this dictionary well. + + The keys are supposed to be unique dotted strings. For instance + module paths are a good choice for it. What is stored in there is + irrelevant for the operation of click. However what is important is + that code that places data here adheres to the general semantics of + the system. 
+ + Example usage:: + + LANG_KEY = f'{__name__}.lang' + + def set_language(value): + ctx = get_current_context() + ctx.meta[LANG_KEY] = value + + def get_language(): + return get_current_context().meta.get(LANG_KEY, 'en_US') + + .. versionadded:: 5.0 + """ + return self._meta + + def make_formatter(self) -> HelpFormatter: + """Creates the :class:`~click.HelpFormatter` for the help and + usage output. + + To quickly customize the formatter class used without overriding + this method, set the :attr:`formatter_class` attribute. + + .. versionchanged:: 8.0 + Added the :attr:`formatter_class` attribute. + """ + return self.formatter_class( + width=self.terminal_width, max_width=self.max_content_width + ) + + def with_resource(self, context_manager: t.ContextManager[V]) -> V: + """Register a resource as if it were used in a ``with`` + statement. The resource will be cleaned up when the context is + popped. + + Uses :meth:`contextlib.ExitStack.enter_context`. It calls the + resource's ``__enter__()`` method and returns the result. When + the context is popped, it closes the stack, which calls the + resource's ``__exit__()`` method. + + To register a cleanup function for something that isn't a + context manager, use :meth:`call_on_close`. Or use something + from :mod:`contextlib` to turn it into a context manager first. + + .. code-block:: python + + @click.group() + @click.option("--name") + @click.pass_context + def cli(ctx): + ctx.obj = ctx.with_resource(connect_db(name)) + + :param context_manager: The context manager to enter. + :return: Whatever ``context_manager.__enter__()`` returns. + + .. versionadded:: 8.0 + """ + return self._exit_stack.enter_context(context_manager) + + def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: + """Register a function to be called when the context tears down. + + This can be used to close resources opened during the script + execution. Resources that support Python's context manager + protocol which would be used in a ``with`` statement should be + registered with :meth:`with_resource` instead. + + :param f: The function to execute on teardown. + """ + return self._exit_stack.callback(f) + + def close(self) -> None: + """Invoke all close callbacks registered with + :meth:`call_on_close`, and exit all context managers entered + with :meth:`with_resource`. + """ + self._exit_stack.close() + # In case the context is reused, create a new exit stack. + self._exit_stack = ExitStack() + + @property + def command_path(self) -> str: + """The computed command path. This is used for the ``usage`` + information on the help page. It's automatically created by + combining the info names of the chain of contexts to the root. 
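# Small sketch of the teardown hooks documented above: ``call_on_close``
# registers a plain function, ``with_resource`` enters a context manager,
# and both are released when ``Context.close()`` runs (normally when the
# outermost ``with ctx:`` block exits).  The command and file names are
# illustrative only.
import click

@click.command()
@click.pass_context
def export(ctx):
    fp = open("report.txt", "w")            # any resource needing cleanup
    ctx.call_on_close(fp.close)             # closed when the context tears down
    tmp = ctx.with_resource(open("scratch.txt", "w"))  # __exit__ handled for us
    fp.write("done\n")
    tmp.write("scratch\n")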
+ """ + rv = "" + if self.info_name is not None: + rv = self.info_name + if self.parent is not None: + parent_command_path = [self.parent.command_path] + + if isinstance(self.parent.command, Command): + for param in self.parent.command.get_params(self): + parent_command_path.extend(param.get_usage_pieces(self)) + + rv = f"{' '.join(parent_command_path)} {rv}" + return rv.lstrip() + + def find_root(self) -> "Context": + """Finds the outermost context.""" + node = self + while node.parent is not None: + node = node.parent + return node + + def find_object(self, object_type: t.Type[V]) -> t.Optional[V]: + """Finds the closest object of a given type.""" + node: t.Optional["Context"] = self + + while node is not None: + if isinstance(node.obj, object_type): + return node.obj + + node = node.parent + + return None + + def ensure_object(self, object_type: t.Type[V]) -> V: + """Like :meth:`find_object` but sets the innermost object to a + new instance of `object_type` if it does not exist. + """ + rv = self.find_object(object_type) + if rv is None: + self.obj = rv = object_type() + return rv + + @typing.overload + def lookup_default( + self, name: str, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @typing.overload + def lookup_default( + self, name: str, call: "te.Literal[False]" = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def lookup_default(self, name: str, call: bool = True) -> t.Optional[t.Any]: + """Get the default for a parameter from :attr:`default_map`. + + :param name: Name of the parameter. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. + """ + if self.default_map is not None: + value = self.default_map.get(name) + + if call and callable(value): + return value() + + return value + + return None + + def fail(self, message: str) -> "te.NoReturn": + """Aborts the execution of the program with a specific error + message. + + :param message: the error message to fail with. + """ + raise UsageError(message, self) + + def abort(self) -> "te.NoReturn": + """Aborts the script.""" + raise Abort() + + def exit(self, code: int = 0) -> "te.NoReturn": + """Exits the application with a given exit code.""" + raise Exit(code) + + def get_usage(self) -> str: + """Helper method to get formatted usage string for the current + context and command. + """ + return self.command.get_usage(self) + + def get_help(self) -> str: + """Helper method to get formatted help page for the current + context and command. + """ + return self.command.get_help(self) + + def _make_sub_context(self, command: "Command") -> "Context": + """Create a new context of the same type as this context, but + for a new command. + + :meta private: + """ + return type(self)(command, info_name=command.name, parent=self) + + def invoke( + __self, # noqa: B902 + __callback: t.Union["Command", t.Callable[..., t.Any]], + *args: t.Any, + **kwargs: t.Any, + ) -> t.Any: + """Invokes a command callback in exactly the way it expects. There + are two ways to invoke this method: + + 1. the first argument can be a callback and all other arguments and + keyword arguments are forwarded directly to the function. + 2. the first argument is a click command object. In that case all + arguments are forwarded as well but proper click parameters + (options and click arguments) must be keyword arguments and Click + will fill in defaults. 
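# Sketch of the ``obj`` helpers described above: ``ensure_object`` stores an
# instance on the innermost context if none exists yet, and ``find_object``
# walks the parent chain looking for one.  ``Repo`` and the commands are
# hypothetical.
import click

class Repo:
    def __init__(self):
        self.home = "."

@click.group()
@click.pass_context
def cli(ctx):
    ctx.ensure_object(Repo)        # creates a Repo on ctx.obj if missing

@cli.command()
@click.pass_context
def status(ctx):
    repo = ctx.find_object(Repo)   # closest Repo up the context chain
    click.echo(repo.home)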
+ + Note that before Click 3.2 keyword arguments were not properly filled + in against the intention of this code and no context was created. For + more information about this change and why it was done in a bugfix + release see :ref:`upgrade-to-3.2`. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if :meth:`forward` is called at multiple levels. + """ + if isinstance(__callback, Command): + other_cmd = __callback + + if other_cmd.callback is None: + raise TypeError( + "The given command does not have a callback that can be invoked." + ) + else: + __callback = other_cmd.callback + + ctx = __self._make_sub_context(other_cmd) + + for param in other_cmd.params: + if param.name not in kwargs and param.expose_value: + kwargs[param.name] = param.type_cast_value( # type: ignore + ctx, param.get_default(ctx) + ) + + # Track all kwargs as params, so that forward() will pass + # them on in subsequent calls. + ctx.params.update(kwargs) + else: + ctx = __self + + with augment_usage_errors(__self): + with ctx: + return __callback(*args, **kwargs) + + def forward( + __self, __cmd: "Command", *args: t.Any, **kwargs: t.Any # noqa: B902 + ) -> t.Any: + """Similar to :meth:`invoke` but fills in default keyword + arguments from the current context if the other command expects + it. This cannot invoke callbacks directly, only other commands. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if ``forward`` is called at multiple levels. + """ + # Can only forward to other commands, not direct callbacks. + if not isinstance(__cmd, Command): + raise TypeError("Callback is not a command.") + + for param in __self.params: + if param not in kwargs: + kwargs[param] = __self.params[param] + + return __self.invoke(__cmd, *args, **kwargs) + + def set_parameter_source(self, name: str, source: ParameterSource) -> None: + """Set the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + :param name: The name of the parameter. + :param source: A member of :class:`~click.core.ParameterSource`. + """ + self._parameter_source[name] = source + + def get_parameter_source(self, name: str) -> t.Optional[ParameterSource]: + """Get the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + This can be useful for determining when a user specified a value + on the command line that is the same as the default value. It + will be :attr:`~click.core.ParameterSource.DEFAULT` only if the + value was actually taken from the default. + + :param name: The name of the parameter. + :rtype: ParameterSource + + .. versionchanged:: 8.0 + Returns ``None`` if the parameter was not provided from any + source. + """ + return self._parameter_source.get(name) + + +class BaseCommand: + """The base command implements the minimal API contract of commands. + Most code will never use this as it does not implement a lot of useful + functionality but it can act as the direct subclass of alternative + parsing methods that do not depend on the Click parser. + + For instance, this can be used to bridge Click and other systems like + argparse or docopt. + + Because base commands do not implement a lot of the API that other + parts of Click take for granted, they are not supported for all + operations. For instance, they cannot be used with the decorators + usually and they have no built-in callback system. + + .. 
versionchanged:: 2.0 + Added the `context_settings` parameter. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + """ + + #: The context class to create with :meth:`make_context`. + #: + #: .. versionadded:: 8.0 + context_class: t.Type[Context] = Context + #: the default for the :attr:`Context.allow_extra_args` flag. + allow_extra_args = False + #: the default for the :attr:`Context.allow_interspersed_args` flag. + allow_interspersed_args = True + #: the default for the :attr:`Context.ignore_unknown_options` flag. + ignore_unknown_options = False + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.Dict[str, t.Any]] = None, + ) -> None: + #: the name the command thinks it has. Upon registering a command + #: on a :class:`Group` the group will default the command name + #: with this information. You should instead use the + #: :class:`Context`\'s :attr:`~Context.info_name` attribute. + self.name = name + + if context_settings is None: + context_settings = {} + + #: an optional dictionary with defaults passed to the context. + self.context_settings: t.Dict[str, t.Any] = context_settings + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire structure + below this command. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + :param ctx: A :class:`Context` representing this command. + + .. versionadded:: 8.0 + """ + return {"name": self.name} + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def get_usage(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get usage") + + def get_help(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get help") + + def make_context( + self, + info_name: t.Optional[str], + args: t.List[str], + parent: t.Optional[Context] = None, + **extra: t.Any, + ) -> Context: + """This function when given an info name and arguments will kick + off the parsing and create a new :class:`Context`. It does not + invoke the actual command callback though. + + To quickly customize the context class used without overriding + this method, set the :attr:`context_class` attribute. + + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it's usually + the name of the script, for commands below it it's + the name of the command. + :param args: the arguments to parse as list of strings. + :param parent: the parent context if available. + :param extra: extra keyword arguments forwarded to the context + constructor. + + .. versionchanged:: 8.0 + Added the :attr:`context_class` attribute. + """ + for key, value in self.context_settings.items(): + if key not in extra: + extra[key] = value + + ctx = self.context_class( + self, info_name=info_name, parent=parent, **extra # type: ignore + ) + + with ctx.scope(cleanup=False): + self.parse_args(ctx, args) + return ctx + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + """Given a context and a list of arguments this creates the parser + and parses the arguments, then modifies the context as necessary. + This is automatically invoked by :meth:`make_context`. 
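# ``make_context`` can also be driven directly, which is occasionally handy
# in tests: it parses the argument list and returns a Context without
# invoking the callback.  ``hello`` is a hypothetical command.
import click

@click.command()
@click.option("--count", default=1)
def hello(count):
    click.echo(count)

with hello.make_context("hello", ["--count", "3"]) as ctx:
    assert ctx.params == {"count": 3}
    hello.invoke(ctx)              # runs the attached callback, prints 3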
+ """ + raise NotImplementedError("Base commands do not know how to parse arguments.") + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the command. The default + implementation is raising a not implemented error. + """ + raise NotImplementedError("Base commands are not invokable by default") + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of chained multi-commands. + + Any command could be part of a chained multi-command, so sibling + commands are valid at any point during command completion. Other + command classes will return more completions. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List["CompletionItem"] = [] + + while ctx.parent is not None: + ctx = ctx.parent + + if isinstance(ctx.command, MultiCommand) and ctx.command.chain: + results.extend( + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + if name not in ctx.protected_args + ) + + return results + + @typing.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: "te.Literal[True]" = True, + **extra: t.Any, + ) -> "te.NoReturn": + ... + + @typing.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = ..., + **extra: t.Any, + ) -> t.Any: + ... + + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = True, + windows_expand_args: bool = True, + **extra: t.Any, + ) -> t.Any: + """This is the way to invoke a script with all the bells and + whistles as a command line application. This will always terminate + the application after a call. If this is not wanted, ``SystemExit`` + needs to be caught. + + This method is also available by directly calling the instance of + a :class:`Command`. + + :param args: the arguments that should be used for parsing. If not + provided, ``sys.argv[1:]`` is used. + :param prog_name: the program name that should be used. By default + the program name is constructed by taking the file + name from ``sys.argv[0]``. + :param complete_var: the environment variable that controls the + bash completion support. The default is + ``"__COMPLETE"`` with prog_name in + uppercase. + :param standalone_mode: the default behavior is to invoke the script + in standalone mode. Click will then + handle exceptions and convert them into + error messages and the function will never + return but shut down the interpreter. If + this is set to `False` they will be + propagated to the caller and the return + value of this function is the return value + of :meth:`invoke`. + :param windows_expand_args: Expand glob patterns, user dir, and + env vars in command line args on Windows. + :param extra: extra keyword arguments are forwarded to the context + constructor. See :class:`Context` for more information. + + .. versionchanged:: 8.0.1 + Added the ``windows_expand_args`` parameter to allow + disabling command line arg expansion on Windows. + + .. 
versionchanged:: 8.0 + When taking arguments from ``sys.argv`` on Windows, glob + patterns, user dir, and env vars are expanded. + + .. versionchanged:: 3.0 + Added the ``standalone_mode`` parameter. + """ + # Verify that the environment is configured correctly, or reject + # further execution to avoid a broken script. + _verify_python_env() + + if args is None: + args = sys.argv[1:] + + if os.name == "nt" and windows_expand_args: + args = _expand_args(args) + else: + args = list(args) + + if prog_name is None: + prog_name = _detect_program_name() + + # Process shell completion requests and exit early. + self._main_shell_completion(extra, prog_name, complete_var) + + try: + try: + with self.make_context(prog_name, args, **extra) as ctx: + rv = self.invoke(ctx) + if not standalone_mode: + return rv + # it's not safe to `ctx.exit(rv)` here! + # note that `rv` may actually contain data like "1" which + # has obvious effects + # more subtle case: `rv=[None, None]` can come out of + # chained commands which all returned `None` -- so it's not + # even always obvious that `rv` indicates success/failure + # by its truthiness/falsiness + ctx.exit() + except (EOFError, KeyboardInterrupt): + echo(file=sys.stderr) + raise Abort() from None + except ClickException as e: + if not standalone_mode: + raise + e.show() + sys.exit(e.exit_code) + except OSError as e: + if e.errno == errno.EPIPE: + sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) + sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) + sys.exit(1) + else: + raise + except Exit as e: + if standalone_mode: + sys.exit(e.exit_code) + else: + # in non-standalone mode, return the exit code + # note that this is only reached if `self.invoke` above raises + # an Exit explicitly -- thus bypassing the check there which + # would return its result + # the results of non-standalone execution may therefore be + # somewhat ambiguous: if there are codepaths which lead to + # `ctx.exit(1)` and to `return 1`, the caller won't be able to + # tell the difference between the two + return e.exit_code + except Abort: + if not standalone_mode: + raise + echo(_("Aborted!"), file=sys.stderr) + sys.exit(1) + + def _main_shell_completion( + self, + ctx_args: t.Dict[str, t.Any], + prog_name: str, + complete_var: t.Optional[str] = None, + ) -> None: + """Check if the shell is asking for tab completion, process + that, then exit early. Called from :meth:`main` before the + program is invoked. + + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. Defaults to + ``_{PROG_NAME}_COMPLETE``. + """ + if complete_var is None: + complete_var = f"_{prog_name}_COMPLETE".replace("-", "_").upper() + + instruction = os.environ.get(complete_var) + + if not instruction: + return + + from .shell_completion import shell_complete + + rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) + sys.exit(rv) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Alias for :meth:`main`.""" + return self.main(*args, **kwargs) + + +class Command(BaseCommand): + """Commands are the basic building block of command line interfaces in + Click. A basic command handles command line parsing and might dispatch + more parsing to commands nested below it. + + .. versionchanged:: 2.0 + Added the `context_settings` parameter. + .. versionchanged:: 8.0 + Added repr showing the command name + .. versionchanged:: 7.1 + Added the `no_args_is_help` parameter. 
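# Sketch of the ``standalone_mode`` behaviour described in ``main``: with the
# default (True) Click handles errors and calls ``sys.exit``; with
# ``standalone_mode=False`` exceptions propagate and the callback's return
# value is handed back to the caller.  ``add`` is a hypothetical command.
import click

@click.command()
@click.argument("x", type=int)
@click.argument("y", type=int)
def add(x, y):
    return x + y

result = add.main(["2", "3"], standalone_mode=False)
assert result == 5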
+ + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + :param callback: the callback to invoke. This is optional. + :param params: the parameters to register with this command. This can + be either :class:`Option` or :class:`Argument` objects. + :param help: the help string to use for this command. + :param epilog: like the help string but it's printed at the end of the + help page after everything else. + :param short_help: the short help to use for this command. This is + shown on the command listing of the parent command. + :param add_help_option: by default each command registers a ``--help`` + option. This can be disabled by this parameter. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is disabled by default. + If enabled this will add ``--help`` as argument + if no arguments are passed + :param hidden: hide this command from help outputs. + + :param deprecated: issues a message indicating that + the command is deprecated. + """ + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.Dict[str, t.Any]] = None, + callback: t.Optional[t.Callable[..., t.Any]] = None, + params: t.Optional[t.List["Parameter"]] = None, + help: t.Optional[str] = None, + epilog: t.Optional[str] = None, + short_help: t.Optional[str] = None, + options_metavar: t.Optional[str] = "[OPTIONS]", + add_help_option: bool = True, + no_args_is_help: bool = False, + hidden: bool = False, + deprecated: bool = False, + ) -> None: + super().__init__(name, context_settings) + #: the callback to execute when the command fires. This might be + #: `None` in which case nothing happens. + self.callback = callback + #: the list of parameters for this command in the order they + #: should show up in the help page and execute. Eager parameters + #: will automatically be handled before non eager ones. + self.params: t.List["Parameter"] = params or [] + + # if a form feed (page break) is found in the help text, truncate help + # text to the content preceding the first form feed + if help and "\f" in help: + help = help.split("\f", 1)[0] + + self.help = help + self.epilog = epilog + self.options_metavar = options_metavar + self.short_help = short_help + self.add_help_option = add_help_option + self.no_args_is_help = no_args_is_help + self.hidden = hidden + self.deprecated = deprecated + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + info_dict.update( + params=[param.to_info_dict() for param in self.get_params(ctx)], + help=self.help, + epilog=self.epilog, + short_help=self.short_help, + hidden=self.hidden, + deprecated=self.deprecated, + ) + return info_dict + + def get_usage(self, ctx: Context) -> str: + """Formats the usage line into a string and returns it. + + Calls :meth:`format_usage` internally. + """ + formatter = ctx.make_formatter() + self.format_usage(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_params(self, ctx: Context) -> t.List["Parameter"]: + rv = self.params + help_option = self.get_help_option(ctx) + + if help_option is not None: + rv = [*rv, help_option] + + return rv + + def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the usage line into the formatter. + + This is a low-level method called by :meth:`get_usage`. 
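# The constructor above also allows building a command without decorators by
# passing ``params`` and ``callback`` explicitly, which is useful when
# commands are generated programmatically.  All names here are illustrative.
import click

def greet(name):
    click.echo(f"Hi {name}")

cmd = click.Command(
    name="greet",
    params=[click.Option(["--name"], default="world", show_default=True)],
    callback=greet,
    help="Greet someone.\fNotes after the form feed are cut from the help page.",
)

if __name__ == "__main__":
    cmd()   # Command.__call__ is an alias for main()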
+ """ + pieces = self.collect_usage_pieces(ctx) + formatter.write_usage(ctx.command_path, " ".join(pieces)) + + def collect_usage_pieces(self, ctx: Context) -> t.List[str]: + """Returns all the pieces that go into the usage line and returns + it as a list of strings. + """ + rv = [self.options_metavar] if self.options_metavar else [] + + for param in self.get_params(ctx): + rv.extend(param.get_usage_pieces(ctx)) + + return rv + + def get_help_option_names(self, ctx: Context) -> t.List[str]: + """Returns the names for the help option.""" + all_names = set(ctx.help_option_names) + for param in self.params: + all_names.difference_update(param.opts) + all_names.difference_update(param.secondary_opts) + return list(all_names) + + def get_help_option(self, ctx: Context) -> t.Optional["Option"]: + """Returns the help option object.""" + help_options = self.get_help_option_names(ctx) + + if not help_options or not self.add_help_option: + return None + + def show_help(ctx: Context, param: "Parameter", value: str) -> None: + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + return Option( + help_options, + is_flag=True, + is_eager=True, + expose_value=False, + callback=show_help, + help=_("Show this message and exit."), + ) + + def make_parser(self, ctx: Context) -> OptionParser: + """Creates the underlying option parser for this command.""" + parser = OptionParser(ctx) + for param in self.get_params(ctx): + param.add_to_parser(parser, ctx) + return parser + + def get_help(self, ctx: Context) -> str: + """Formats the help into a string and returns it. + + Calls :meth:`format_help` internally. + """ + formatter = ctx.make_formatter() + self.format_help(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_short_help_str(self, limit: int = 45) -> str: + """Gets short help for the command or makes it by shortening the + long help string. + """ + text = self.short_help or "" + + if not text and self.help: + text = make_default_short_help(self.help, limit) + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + return text.strip() + + def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help into the formatter if it exists. + + This is a low-level method called by :meth:`get_help`. 
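# ``get_usage``/``get_help`` need a Context because widths and help option
# names can differ per invocation; rendering them outside a running command
# looks like this (``cmd`` is a hypothetical Command instance):
import click

@click.command()
@click.option("--loud", is_flag=True, help="Shout the greeting.")
def cmd(loud):
    pass

with click.Context(cmd, info_name="cmd") as ctx:
    print(cmd.get_usage(ctx))   # "Usage: cmd [OPTIONS]"
    print(cmd.get_help(ctx))    # full help page, including --loud and --help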
+ + This calls the following methods: + + - :meth:`format_usage` + - :meth:`format_help_text` + - :meth:`format_options` + - :meth:`format_epilog` + """ + self.format_usage(ctx, formatter) + self.format_help_text(ctx, formatter) + self.format_options(ctx, formatter) + self.format_epilog(ctx, formatter) + + def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help text to the formatter if it exists.""" + text = self.help or "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + if text: + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(text) + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes all the options into the formatter if they exist.""" + opts = [] + for param in self.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + opts.append(rv) + + if opts: + with formatter.section(_("Options")): + formatter.write_dl(opts) + + def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the epilog into the formatter if it exists.""" + if self.epilog: + formatter.write_paragraph() + with formatter.indentation(): + formatter.write_text(self.epilog) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + parser = self.make_parser(ctx) + opts, args, param_order = parser.parse_args(args=args) + + for param in iter_params_for_processing(param_order, self.get_params(ctx)): + value, args = param.handle_parse_result(ctx, opts, args) + + if args and not ctx.allow_extra_args and not ctx.resilient_parsing: + ctx.fail( + ngettext( + "Got unexpected extra argument ({args})", + "Got unexpected extra arguments ({args})", + len(args), + ).format(args=" ".join(map(str, args))) + ) + + ctx.args = args + return args + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the attached callback (if it exists) + in the right way. + """ + if self.deprecated: + message = _( + "DeprecationWarning: The command {name!r} is deprecated." + ).format(name=self.name) + echo(style(message, fg="red"), err=True) + + if self.callback is not None: + return ctx.invoke(self.callback, **ctx.params) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options and chained multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List["CompletionItem"] = [] + + if incomplete and not incomplete[0].isalnum(): + for param in self.get_params(ctx): + if ( + not isinstance(param, Option) + or param.hidden + or ( + not param.multiple + and ctx.get_parameter_source(param.name) # type: ignore + is ParameterSource.COMMANDLINE + ) + ): + continue + + results.extend( + CompletionItem(name, help=param.help) + for name in [*param.opts, *param.secondary_opts] + if name.startswith(incomplete) + ) + + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class MultiCommand(Command): + """A multi command is the basic implementation of a command that + dispatches to subcommands. The most common version is the + :class:`Group`. 
+ + :param invoke_without_command: this controls how the multi command itself + is invoked. By default it's only invoked + if a subcommand is provided. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is enabled by default if + `invoke_without_command` is disabled or disabled + if it's enabled. If enabled this will add + ``--help`` as argument if no arguments are + passed. + :param subcommand_metavar: the string that is used in the documentation + to indicate the subcommand place. + :param chain: if this is set to `True` chaining of multiple subcommands + is enabled. This restricts the form of commands in that + they cannot have optional arguments but it allows + multiple commands to be chained together. + :param result_callback: The result callback to attach to this multi + command. This can be set or changed later with the + :meth:`result_callback` decorator. + """ + + allow_extra_args = True + allow_interspersed_args = False + + def __init__( + self, + name: t.Optional[str] = None, + invoke_without_command: bool = False, + no_args_is_help: t.Optional[bool] = None, + subcommand_metavar: t.Optional[str] = None, + chain: bool = False, + result_callback: t.Optional[t.Callable[..., t.Any]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if no_args_is_help is None: + no_args_is_help = not invoke_without_command + + self.no_args_is_help = no_args_is_help + self.invoke_without_command = invoke_without_command + + if subcommand_metavar is None: + if chain: + subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." + else: + subcommand_metavar = "COMMAND [ARGS]..." + + self.subcommand_metavar = subcommand_metavar + self.chain = chain + # The result callback that is stored. This can be set or + # overridden with the :func:`result_callback` decorator. + self._result_callback = result_callback + + if self.chain: + for param in self.params: + if isinstance(param, Argument) and not param.required: + raise RuntimeError( + "Multi commands in chain mode cannot have" + " optional arguments." + ) + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + commands = {} + + for name in self.list_commands(ctx): + command = self.get_command(ctx, name) + + if command is None: + continue + + sub_ctx = ctx._make_sub_context(command) + + with sub_ctx.scope(cleanup=False): + commands[name] = command.to_info_dict(sub_ctx) + + info_dict.update(commands=commands, chain=self.chain) + return info_dict + + def collect_usage_pieces(self, ctx: Context) -> t.List[str]: + rv = super().collect_usage_pieces(ctx) + rv.append(self.subcommand_metavar) + return rv + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + super().format_options(ctx, formatter) + self.format_commands(ctx, formatter) + + def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: + """Adds a result callback to the command. By default if a + result callback is already registered this will chain them but + this can be disabled with the `replace` parameter. The result + callback is invoked with the return value of the subcommand + (or the list of return values from all subcommands if chaining + is enabled) as well as the parameters as they would be passed + to the main callback. 
+ + Example:: + + @click.group() + @click.option('-i', '--input', default=23) + def cli(input): + return 42 + + @cli.result_callback() + def process_result(result, input): + return result + input + + :param replace: if set to `True` an already existing result + callback will be removed. + + .. versionchanged:: 8.0 + Renamed from ``resultcallback``. + + .. versionadded:: 3.0 + """ + + def decorator(f: F) -> F: + old_callback = self._result_callback + + if old_callback is None or replace: + self._result_callback = f + return f + + def function(__value, *args, **kwargs): # type: ignore + inner = old_callback(__value, *args, **kwargs) # type: ignore + return f(inner, *args, **kwargs) + + self._result_callback = rv = update_wrapper(t.cast(F, function), f) + return rv + + return decorator + + def resultcallback(self, replace: bool = False) -> t.Callable[[F], F]: + import warnings + + warnings.warn( + "'resultcallback' has been renamed to 'result_callback'." + " The old name will be removed in Click 8.1.", + DeprecationWarning, + stacklevel=2, + ) + return self.result_callback(replace=replace) + + def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: + """Extra format methods for multi methods that adds all the commands + after the options. + """ + commands = [] + for subcommand in self.list_commands(ctx): + cmd = self.get_command(ctx, subcommand) + # What is this, the tool lied about a command. Ignore it + if cmd is None: + continue + if cmd.hidden: + continue + + commands.append((subcommand, cmd)) + + # allow for 3 times the default spacing + if len(commands): + limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) + + rows = [] + for subcommand, cmd in commands: + help = cmd.get_short_help_str(limit) + rows.append((subcommand, help)) + + if rows: + with formatter.section(_("Commands")): + formatter.write_dl(rows) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + rest = super().parse_args(ctx, args) + + if self.chain: + ctx.protected_args = rest + ctx.args = [] + elif rest: + ctx.protected_args, ctx.args = rest[:1], rest[1:] + + return ctx.args + + def invoke(self, ctx: Context) -> t.Any: + def _process_result(value: t.Any) -> t.Any: + if self._result_callback is not None: + value = ctx.invoke(self._result_callback, value, **ctx.params) + return value + + if not ctx.protected_args: + if self.invoke_without_command: + # No subcommand was invoked, so the result callback is + # invoked with None for regular groups, or an empty list + # for chained groups. + with ctx: + super().invoke(ctx) + return _process_result([] if self.chain else None) + ctx.fail(_("Missing command.")) + + # Fetch args back out + args = [*ctx.protected_args, *ctx.args] + ctx.args = [] + ctx.protected_args = [] + + # If we're not in chain mode, we only allow the invocation of a + # single command but we also inform the current context about the + # name of the command to invoke. + if not self.chain: + # Make sure the context is entered so we do not clean up + # resources until the result processor has worked. 
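# Sketch of chain mode combined with a result callback, as described above:
# with ``chain=True`` every parsed subcommand is invoked in sequence and the
# result callback receives the list of their return values.  The pipeline
# below is hypothetical.
import click

@click.group(chain=True)
def pipeline():
    pass

@pipeline.command()
def load():
    return "loaded"

@pipeline.command()
def transform():
    return "transformed"

@pipeline.result_callback()
def summarize(results):
    click.echo(", ".join(results))   # e.g. "loaded, transformed"

# invoked as:  pipeline load transform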
+ with ctx: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + ctx.invoked_subcommand = cmd_name + super().invoke(ctx) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) + with sub_ctx: + return _process_result(sub_ctx.command.invoke(sub_ctx)) + + # In chain mode we create the contexts step by step, but after the + # base command has been invoked. Because at that point we do not + # know the subcommands yet, the invoked subcommand attribute is + # set to ``*`` to inform the command that subcommands are executed + # but nothing else. + with ctx: + ctx.invoked_subcommand = "*" if args else None + super().invoke(ctx) + + # Otherwise we make every single context and invoke them in a + # chain. In that case the return value to the result processor + # is the list of all invoked subcommand's results. + contexts = [] + while args: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + sub_ctx = cmd.make_context( + cmd_name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + ) + contexts.append(sub_ctx) + args, sub_ctx.args = sub_ctx.args, [] + + rv = [] + for sub_ctx in contexts: + with sub_ctx: + rv.append(sub_ctx.command.invoke(sub_ctx)) + return _process_result(rv) + + def resolve_command( + self, ctx: Context, args: t.List[str] + ) -> t.Tuple[t.Optional[str], t.Optional[Command], t.List[str]]: + cmd_name = make_str(args[0]) + original_cmd_name = cmd_name + + # Get the command + cmd = self.get_command(ctx, cmd_name) + + # If we can't find the command but there is a normalization + # function available, we try with that one. + if cmd is None and ctx.token_normalize_func is not None: + cmd_name = ctx.token_normalize_func(cmd_name) + cmd = self.get_command(ctx, cmd_name) + + # If we don't find the command we want to show an error message + # to the user that it was not provided. However, there is + # something else we should do: if the first argument looks like + # an option we want to kick off parsing again for arguments to + # resolve things like --help which now should go to the main + # place. + if cmd is None and not ctx.resilient_parsing: + if split_opt(cmd_name)[0]: + self.parse_args(ctx, ctx.args) + ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) + return cmd_name if cmd else None, cmd, args[1:] + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + """Given a context and a command name, this returns a + :class:`Command` object if it exists or returns `None`. + """ + raise NotImplementedError + + def list_commands(self, ctx: Context) -> t.List[str]: + """Returns a list of subcommand names in the order they should + appear. + """ + return [] + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options, subcommands, and chained + multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results = [ + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + ] + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class Group(MultiCommand): + """A group allows a command to have subcommands attached. This is + the most common way to implement nesting in Click. 
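# The ``invoked_subcommand`` handling above in practice: a group declared
# with ``invoke_without_command=True`` runs its own callback even when no
# subcommand is given and can inspect ``ctx.invoked_subcommand`` to tell the
# two cases apart.  ``tool``/``sync`` are hypothetical names.
import click

@click.group(invoke_without_command=True)
@click.pass_context
def tool(ctx):
    if ctx.invoked_subcommand is None:
        click.echo("Running default action")
    else:
        click.echo(f"About to run {ctx.invoked_subcommand}")

@tool.command()
def sync():
    click.echo("Syncing")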
+ + :param name: The name of the group command. + :param commands: A dict mapping names to :class:`Command` objects. + Can also be a list of :class:`Command`, which will use + :attr:`Command.name` to create the dict. + :param attrs: Other command arguments described in + :class:`MultiCommand`, :class:`Command`, and + :class:`BaseCommand`. + + .. versionchanged:: 8.0 + The ``commmands`` argument can be a list of command objects. + """ + + #: If set, this is used by the group's :meth:`command` decorator + #: as the default :class:`Command` class. This is useful to make all + #: subcommands use a custom command class. + #: + #: .. versionadded:: 8.0 + command_class: t.Optional[t.Type[Command]] = None + + #: If set, this is used by the group's :meth:`group` decorator + #: as the default :class:`Group` class. This is useful to make all + #: subgroups use a custom group class. + #: + #: If set to the special value :class:`type` (literally + #: ``group_class = type``), this group's class will be used as the + #: default class. This makes a custom group class continue to make + #: custom groups. + #: + #: .. versionadded:: 8.0 + group_class: t.Optional[t.Union[t.Type["Group"], t.Type[type]]] = None + # Literal[type] isn't valid, so use Type[type] + + def __init__( + self, + name: t.Optional[str] = None, + commands: t.Optional[t.Union[t.Dict[str, Command], t.Sequence[Command]]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if commands is None: + commands = {} + elif isinstance(commands, abc.Sequence): + commands = {c.name: c for c in commands if c.name is not None} + + #: The registered subcommands by their exported names. + self.commands: t.Dict[str, Command] = commands + + def add_command(self, cmd: Command, name: t.Optional[str] = None) -> None: + """Registers another :class:`Command` with this group. If the name + is not provided, the name of the command is used. + """ + name = name or cmd.name + if name is None: + raise TypeError("Command has no name.") + _check_multicommand(self, name, cmd, register=True) + self.commands[name] = cmd + + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Command]: + """A shortcut decorator for declaring and attaching a command to + the group. This takes the same arguments as :func:`command` and + immediately registers the created command with this group by + calling :meth:`add_command`. + + To customize the command class used, set the + :attr:`command_class` attribute. + + .. versionchanged:: 8.0 + Added the :attr:`command_class` attribute. + """ + from .decorators import command + + if self.command_class is not None and "cls" not in kwargs: + kwargs["cls"] = self.command_class + + def decorator(f: t.Callable[..., t.Any]) -> Command: + cmd = command(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + return decorator + + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], "Group"]: + """A shortcut decorator for declaring and attaching a group to + the group. This takes the same arguments as :func:`group` and + immediately registers the created group with this group by + calling :meth:`add_command`. + + To customize the group class used, set the :attr:`group_class` + attribute. + + .. versionchanged:: 8.0 + Added the :attr:`group_class` attribute. 
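# The ``command_class``/``group_class`` hooks above let a group stamp out
# subcommands of a custom class without repeating ``cls=`` everywhere.  The
# ``TimedCommand`` subclass is a hypothetical illustration.
import time
import click

class TimedCommand(click.Command):
    def invoke(self, ctx):
        start = time.perf_counter()
        try:
            return super().invoke(ctx)
        finally:
            click.echo(f"took {time.perf_counter() - start:.3f}s", err=True)

class TimedGroup(click.Group):
    command_class = TimedCommand   # every @cli.command() uses TimedCommand
    group_class = type             # nested groups stay TimedGroup

@click.group(cls=TimedGroup)
def cli():
    pass

@cli.command()
def build():
    click.echo("building")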
+ """ + from .decorators import group + + if self.group_class is not None and "cls" not in kwargs: + if self.group_class is type: + kwargs["cls"] = type(self) + else: + kwargs["cls"] = self.group_class + + def decorator(f: t.Callable[..., t.Any]) -> "Group": + cmd = group(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + return decorator + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + return self.commands.get(cmd_name) + + def list_commands(self, ctx: Context) -> t.List[str]: + return sorted(self.commands) + + +class CommandCollection(MultiCommand): + """A command collection is a multi command that merges multiple multi + commands together into one. This is a straightforward implementation + that accepts a list of different multi commands as sources and + provides all the commands for each of them. + """ + + def __init__( + self, + name: t.Optional[str] = None, + sources: t.Optional[t.List[MultiCommand]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + #: The list of registered multi commands. + self.sources: t.List[MultiCommand] = sources or [] + + def add_source(self, multi_cmd: MultiCommand) -> None: + """Adds a new multi command to the chain dispatcher.""" + self.sources.append(multi_cmd) + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + for source in self.sources: + rv = source.get_command(ctx, cmd_name) + + if rv is not None: + if self.chain: + _check_multicommand(self, cmd_name, rv) + + return rv + + return None + + def list_commands(self, ctx: Context) -> t.List[str]: + rv: t.Set[str] = set() + + for source in self.sources: + rv.update(source.list_commands(ctx)) + + return sorted(rv) + + +def _check_iter(value: t.Any) -> t.Iterator[t.Any]: + """Check if the value is iterable but not a string. Raises a type + error, or return an iterator over the value. + """ + if isinstance(value, str): + raise TypeError + + return iter(value) + + +class Parameter: + r"""A parameter to a command comes in two versions: they are either + :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently + not supported by design as some of the internals for parsing are + intentionally not finalized. + + Some settings are supported by both options and arguments. + + :param param_decls: the parameter declarations for this option or + argument. This is a list of flags or argument + names. + :param type: the type that should be used. Either a :class:`ParamType` + or a Python type. The later is converted into the former + automatically if supported. + :param required: controls if this is optional or not. + :param default: the default value if omitted. This can also be a callable, + in which case it's invoked when the default is needed + without any arguments. + :param callback: A function to further process or validate the value + after type conversion. It is called as ``f(ctx, param, value)`` + and must return the value. It is called for all sources, + including prompts. + :param nargs: the number of arguments to match. If not ``1`` the return + value is a tuple instead of single value. The default for + nargs is ``1`` (except if the type is a tuple, then it's + the arity of the tuple). If ``nargs=-1``, all remaining + parameters are collected. + :param metavar: how the value is represented in the help page. + :param expose_value: if this is `True` then the value is passed onwards + to the command callback and stored on the context, + otherwise it's skipped. 
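# ``CommandCollection`` as described above merges the command maps of several
# multi commands; the first source that knows a name wins.  The two groups
# here are hypothetical.
import click

@click.group()
def user_cmds():
    pass

@user_cmds.command()
def whoami():
    click.echo("you")

@click.group()
def admin_cmds():
    pass

@admin_cmds.command()
def reboot():
    click.echo("rebooting")

cli = click.CommandCollection(sources=[user_cmds, admin_cmds])

if __name__ == "__main__":
    cli()   # exposes both `whoami` and `reboot`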
+ :param is_eager: eager values are processed before non eager ones. This + should not be set for arguments or it will inverse the + order of processing. + :param envvar: a string or list of strings that are environment variables + that should be checked. + :param shell_complete: A function that returns custom shell + completions. Used instead of the param's type completion if + given. Takes ``ctx, param, incomplete`` and must return a list + of :class:`~click.shell_completion.CompletionItem` or a list of + strings. + + .. versionchanged:: 8.0 + ``process_value`` validates required parameters and bounded + ``nargs``, and invokes the parameter callback before returning + the value. This allows the callback to validate prompts. + ``full_process_value`` is removed. + + .. versionchanged:: 8.0 + ``autocompletion`` is renamed to ``shell_complete`` and has new + semantics described above. The old name is deprecated and will + be removed in 8.1, until then it will be wrapped to match the + new requirements. + + .. versionchanged:: 8.0 + For ``multiple=True, nargs>1``, the default must be a list of + tuples. + + .. versionchanged:: 8.0 + Setting a default is no longer required for ``nargs>1``, it will + default to ``None``. ``multiple=True`` or ``nargs=-1`` will + default to ``()``. + + .. versionchanged:: 7.1 + Empty environment variables are ignored rather than taking the + empty string value. This makes it possible for scripts to clear + variables if they can't unset them. + + .. versionchanged:: 2.0 + Changed signature for parameter callback to also be passed the + parameter. The old callback format will still work, but it will + raise a warning to give you a chance to migrate the code easier. + """ + + param_type_name = "parameter" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + required: bool = False, + default: t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]] = None, + callback: t.Optional[t.Callable[[Context, "Parameter", t.Any], t.Any]] = None, + nargs: t.Optional[int] = None, + multiple: bool = False, + metavar: t.Optional[str] = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: t.Optional[t.Union[str, t.Sequence[str]]] = None, + shell_complete: t.Optional[ + t.Callable[ + [Context, "Parameter", str], + t.Union[t.List["CompletionItem"], t.List[str]], + ] + ] = None, + autocompletion: t.Optional[ + t.Callable[ + [Context, t.List[str], str], t.List[t.Union[t.Tuple[str, str], str]] + ] + ] = None, + ) -> None: + self.name, self.opts, self.secondary_opts = self._parse_decls( + param_decls or (), expose_value + ) + self.type = types.convert_type(type, default) + + # Default nargs to what the type tells us if we have that + # information available. + if nargs is None: + if self.type.is_composite: + nargs = self.type.arity + else: + nargs = 1 + + self.required = required + self.callback = callback + self.nargs = nargs + self.multiple = multiple + self.expose_value = expose_value + self.default = default + self.is_eager = is_eager + self.metavar = metavar + self.envvar = envvar + + if autocompletion is not None: + import warnings + + warnings.warn( + "'autocompletion' is renamed to 'shell_complete'. The old name is" + " deprecated and will be removed in Click 8.1. 
See the docs about" + " 'Parameter' for information about new behavior.", + DeprecationWarning, + stacklevel=2, + ) + + def shell_complete( + ctx: Context, param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + from click.shell_completion import CompletionItem + + out = [] + + for c in autocompletion(ctx, [], incomplete): # type: ignore + if isinstance(c, tuple): + c = CompletionItem(c[0], help=c[1]) + elif isinstance(c, str): + c = CompletionItem(c) + + if c.value.startswith(incomplete): + out.append(c) + + return out + + self._custom_shell_complete = shell_complete + + if __debug__: + if self.type.is_composite and nargs != self.type.arity: + raise ValueError( + f"'nargs' must be {self.type.arity} (or None) for" + f" type {self.type!r}, but it was {nargs}." + ) + + # Skip no default or callable default. + check_default = default if not callable(default) else None + + if check_default is not None: + if multiple: + try: + # Only check the first value against nargs. + check_default = next(_check_iter(check_default), None) + except TypeError: + raise ValueError( + "'default' must be a list when 'multiple' is true." + ) from None + + # Can be None for multiple with empty default. + if nargs != 1 and check_default is not None: + try: + _check_iter(check_default) + except TypeError: + if multiple: + message = ( + "'default' must be a list of lists when 'multiple' is" + " true and 'nargs' != 1." + ) + else: + message = "'default' must be a list when 'nargs' != 1." + + raise ValueError(message) from None + + if nargs > 1 and len(check_default) != nargs: + subject = "item length" if multiple else "length" + raise ValueError( + f"'default' {subject} must match nargs={nargs}." + ) + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + return { + "name": self.name, + "param_type_name": self.param_type_name, + "opts": self.opts, + "secondary_opts": self.secondary_opts, + "type": self.type.to_info_dict(), + "required": self.required, + "nargs": self.nargs, + "multiple": self.multiple, + "default": self.default, + "envvar": self.envvar, + } + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + raise NotImplementedError() + + @property + def human_readable_name(self) -> str: + """Returns the human readable name of this parameter. This is the + same as the name for options, but the metavar for arguments. + """ + return self.name # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + + metavar = self.type.get_metavar(self) + + if metavar is None: + metavar = self.type.name.upper() + + if self.nargs != 1: + metavar += "..." + + return metavar + + @typing.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @typing.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + """Get the default for the parameter. Tries + :meth:`Context.lookup_default` first, then the local default. + + :param ctx: Current context. 
+ :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0.2 + Type casting is no longer performed when getting a default. + + .. versionchanged:: 8.0.1 + Type casting can fail in resilient parsing mode. Invalid + defaults will not prevent showing help text. + + .. versionchanged:: 8.0 + Looks at ``ctx.default_map`` first. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. + """ + value = ctx.lookup_default(self.name, call=False) # type: ignore + + if value is None: + value = self.default + + if call and callable(value): + value = value() + + return value + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + raise NotImplementedError() + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, t.Any] + ) -> t.Tuple[t.Any, ParameterSource]: + value = opts.get(self.name) # type: ignore + source = ParameterSource.COMMANDLINE + + if value is None: + value = self.value_from_envvar(ctx) + source = ParameterSource.ENVIRONMENT + + if value is None: + value = ctx.lookup_default(self.name) # type: ignore + source = ParameterSource.DEFAULT_MAP + + if value is None: + value = self.get_default(ctx) + source = ParameterSource.DEFAULT + + return value, source + + def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: + """Convert and validate a value against the option's + :attr:`type`, :attr:`multiple`, and :attr:`nargs`. + """ + if value is None: + return () if self.multiple or self.nargs == -1 else None + + def check_iter(value: t.Any) -> t.Iterator: + try: + return _check_iter(value) + except TypeError: + # This should only happen when passing in args manually, + # the parser should construct an iterable when parsing + # the command line. + raise BadParameter( + _("Value must be an iterable."), ctx=ctx, param=self + ) from None + + if self.nargs == 1 or self.type.is_composite: + convert: t.Callable[[t.Any], t.Any] = partial( + self.type, param=self, ctx=ctx + ) + elif self.nargs == -1: + + def convert(value: t.Any) -> t.Tuple: + return tuple(self.type(x, self, ctx) for x in check_iter(value)) + + else: # nargs > 1 + + def convert(value: t.Any) -> t.Tuple: + value = tuple(check_iter(value)) + + if len(value) != self.nargs: + raise BadParameter( + ngettext( + "Takes {nargs} values but 1 was given.", + "Takes {nargs} values but {len} were given.", + len(value), + ).format(nargs=self.nargs, len=len(value)), + ctx=ctx, + param=self, + ) + + return tuple(self.type(x, self, ctx) for x in value) + + if self.multiple: + return tuple(convert(x) for x in check_iter(value)) + + return convert(value) + + def value_is_missing(self, value: t.Any) -> bool: + if value is None: + return True + + if (self.nargs != 1 or self.multiple) and value == (): + return True + + return False + + def process_value(self, ctx: Context, value: t.Any) -> t.Any: + value = self.type_cast_value(ctx, value) + + if self.required and self.value_is_missing(value): + raise MissingParameter(ctx=ctx, param=self) + + if self.callback is not None: + value = self.callback(ctx, self, value) + + return value + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + if self.envvar is None: + return None + + if isinstance(self.envvar, str): + rv = os.environ.get(self.envvar) + + if rv: + return rv + else: + for envvar in self.envvar: + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + 
if rv is not None and self.nargs != 1: + rv = self.type.split_envvar_value(rv) + + return rv + + def handle_parse_result( + self, ctx: Context, opts: t.Mapping[str, t.Any], args: t.List[str] + ) -> t.Tuple[t.Any, t.List[str]]: + with augment_usage_errors(ctx, param=self): + value, source = self.consume_value(ctx, opts) + ctx.set_parameter_source(self.name, source) # type: ignore + + try: + value = self.process_value(ctx, value) + except Exception: + if not ctx.resilient_parsing: + raise + + value = None + + if self.expose_value: + ctx.params[self.name] = value # type: ignore + + return value, args + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + pass + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [] + + def get_error_hint(self, ctx: Context) -> str: + """Get a stringified version of the param for use in error messages to + indicate which param caused the error. + """ + hint_list = self.opts or [self.human_readable_name] + return " / ".join(f"'{x}'" for x in hint_list) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. If a + ``shell_complete`` function was given during init, it is used. + Otherwise, the :attr:`type` + :meth:`~click.types.ParamType.shell_complete` function is used. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + if self._custom_shell_complete is not None: + results = self._custom_shell_complete(ctx, self, incomplete) + + if results and isinstance(results[0], str): + from click.shell_completion import CompletionItem + + results = [CompletionItem(c) for c in results] + + return t.cast(t.List["CompletionItem"], results) + + return self.type.shell_complete(ctx, self, incomplete) + + +class Option(Parameter): + """Options are usually optional values on the command line and + have some extra features that arguments don't have. + + All other parameters are passed onwards to the parameter constructor. + + :param show_default: controls if the default value should be shown on the + help page. Normally, defaults are not shown. If this + value is a string, it shows the string instead of the + value. This is particularly useful for dynamic options. + :param show_envvar: controls if an environment variable should be shown on + the help page. Normally, environment variables + are not shown. + :param prompt: if set to `True` or a non empty string then the user will be + prompted for input. If set to `True` the prompt will be the + option name capitalized. + :param confirmation_prompt: Prompt a second time to confirm the + value if it was prompted for. Can be set to a string instead of + ``True`` to customize the message. + :param prompt_required: If set to ``False``, the user will be + prompted for input only when the option was specified as a flag + without a value. + :param hide_input: if this is `True` then the input on the prompt will be + hidden from the user. This is useful for password + input. + :param is_flag: forces this option to act as a flag. The default is + auto detection. + :param flag_value: which value should be used for this flag if it's + enabled. This is set to a boolean automatically if + the option string contains a slash to mark two options. + :param multiple: if this is set to `True` then the argument is accepted + multiple times and recorded. 
This is similar to ``nargs`` + in how it works but supports arbitrary number of + arguments. + :param count: this flag makes an option increment an integer. + :param allow_from_autoenv: if this is enabled then the value of this + parameter will be pulled from an environment + variable in case a prefix is defined on the + context. + :param help: the help string. + :param hidden: hide this option from help outputs. + + .. versionchanged:: 8.0.1 + ``type`` is detected from ``flag_value`` if given. + """ + + param_type_name = "option" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + show_default: t.Union[bool, str] = False, + prompt: t.Union[bool, str] = False, + confirmation_prompt: t.Union[bool, str] = False, + prompt_required: bool = True, + hide_input: bool = False, + is_flag: t.Optional[bool] = None, + flag_value: t.Optional[t.Any] = None, + multiple: bool = False, + count: bool = False, + allow_from_autoenv: bool = True, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + help: t.Optional[str] = None, + hidden: bool = False, + show_choices: bool = True, + show_envvar: bool = False, + **attrs: t.Any, + ) -> None: + default_is_missing = "default" not in attrs + super().__init__(param_decls, type=type, multiple=multiple, **attrs) + + if prompt is True: + if self.name is None: + raise TypeError("'name' is required with 'prompt=True'.") + + prompt_text: t.Optional[str] = self.name.replace("_", " ").capitalize() + elif prompt is False: + prompt_text = None + else: + prompt_text = t.cast(str, prompt) + + self.prompt = prompt_text + self.confirmation_prompt = confirmation_prompt + self.prompt_required = prompt_required + self.hide_input = hide_input + self.hidden = hidden + + # If prompt is enabled but not required, then the option can be + # used as a flag to indicate using prompt or flag_value. + self._flag_needs_value = self.prompt is not None and not self.prompt_required + + if is_flag is None: + if flag_value is not None: + # Implicitly a flag because flag_value was set. + is_flag = True + elif self._flag_needs_value: + # Not a flag, but when used as a flag it shows a prompt. + is_flag = False + else: + # Implicitly a flag because flag options were given. + is_flag = bool(self.secondary_opts) + elif is_flag is False and not self._flag_needs_value: + # Not a flag, and prompt is not enabled, can be used as a + # flag if flag_value is set. + self._flag_needs_value = flag_value is not None + + if is_flag and default_is_missing: + self.default: t.Union[t.Any, t.Callable[[], t.Any]] = False + + if flag_value is None: + flag_value = not self.default + + if is_flag and type is None: + # Re-guess the type from the flag value instead of the + # default. 
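+            # convert_type picks a ParamType from the Python type of
+            # flag_value, e.g. @click.option("--mode", flag_value="fast")
+            # ends up with STRING, while a plain boolean flag keeps BOOL.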
+ self.type = types.convert_type(None, flag_value) + + self.is_flag: bool = is_flag + self.is_bool_flag = is_flag and isinstance(self.type, types.BoolParamType) + self.flag_value: t.Any = flag_value + + # Counting + self.count = count + if count: + if type is None: + self.type = types.IntRange(min=0) + if default_is_missing: + self.default = 0 + + self.allow_from_autoenv = allow_from_autoenv + self.help = help + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + + if __debug__: + if self.nargs == -1: + raise TypeError("nargs=-1 is not supported for options.") + + if self.prompt and self.is_flag and not self.is_bool_flag: + raise TypeError("'prompt' is not valid for non-boolean flag.") + + if not self.is_bool_flag and self.secondary_opts: + raise TypeError("Secondary flag is not valid for non-boolean flag.") + + if self.is_bool_flag and self.hide_input and self.prompt is not None: + raise TypeError( + "'prompt' with 'hide_input' is not valid for boolean flag." + ) + + if self.count: + if self.multiple: + raise TypeError("'count' is not valid with 'multiple'.") + + if self.is_flag: + raise TypeError("'count' is not valid with 'is_flag'.") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + help=self.help, + prompt=self.prompt, + is_flag=self.is_flag, + flag_value=self.flag_value, + count=self.count, + hidden=self.hidden, + ) + return info_dict + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + opts = [] + secondary_opts = [] + name = None + possible_names = [] + + for decl in decls: + if decl.isidentifier(): + if name is not None: + raise TypeError(f"Name '{name}' defined twice") + name = decl + else: + split_char = ";" if decl[:1] == "/" else "/" + if split_char in decl: + first, second = decl.split(split_char, 1) + first = first.rstrip() + if first: + possible_names.append(split_opt(first)) + opts.append(first) + second = second.lstrip() + if second: + secondary_opts.append(second.lstrip()) + if first == second: + raise ValueError( + f"Boolean option {decl!r} cannot use the" + " same flag for true/false." + ) + else: + possible_names.append(split_opt(decl)) + opts.append(decl) + + if name is None and possible_names: + possible_names.sort(key=lambda x: -len(x[0])) # group long options first + name = possible_names[0][1].replace("-", "_").lower() + if not name.isidentifier(): + name = None + + if name is None: + if not expose_value: + return None, opts, secondary_opts + raise TypeError("Could not determine name for option") + + if not opts and not secondary_opts: + raise TypeError( + f"No options defined but a name was passed ({name})." + " Did you mean to declare an argument instead? Did" + f" you mean to pass '--{name}'?" 
+ ) + + return name, opts, secondary_opts + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + if self.multiple: + action = "append" + elif self.count: + action = "count" + else: + action = "store" + + if self.is_flag: + action = f"{action}_const" + + if self.is_bool_flag and self.secondary_opts: + parser.add_option( + obj=self, opts=self.opts, dest=self.name, action=action, const=True + ) + parser.add_option( + obj=self, + opts=self.secondary_opts, + dest=self.name, + action=action, + const=False, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + const=self.flag_value, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + nargs=self.nargs, + ) + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + if self.hidden: + return None + + any_prefix_is_slash = False + + def _write_opts(opts: t.Sequence[str]) -> str: + nonlocal any_prefix_is_slash + + rv, any_slashes = join_options(opts) + + if any_slashes: + any_prefix_is_slash = True + + if not self.is_flag and not self.count: + rv += f" {self.make_metavar()}" + + return rv + + rv = [_write_opts(self.opts)] + + if self.secondary_opts: + rv.append(_write_opts(self.secondary_opts)) + + help = self.help or "" + extra = [] + + if self.show_envvar: + envvar = self.envvar + + if envvar is None: + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + + if envvar is not None: + var_str = ( + envvar + if isinstance(envvar, str) + else ", ".join(str(d) for d in envvar) + ) + extra.append(_("env var: {var}").format(var=var_str)) + + # Temporarily enable resilient parsing to avoid type casting + # failing for the default. Might be possible to extend this to + # help formatting in general. + resilient = ctx.resilient_parsing + ctx.resilient_parsing = True + + try: + default_value = self.get_default(ctx, call=False) + finally: + ctx.resilient_parsing = resilient + + show_default_is_str = isinstance(self.show_default, str) + + if show_default_is_str or ( + default_value is not None and (self.show_default or ctx.show_default) + ): + if show_default_is_str: + default_string = f"({self.show_default})" + elif isinstance(default_value, (list, tuple)): + default_string = ", ".join(str(d) for d in default_value) + elif callable(default_value): + default_string = _("(dynamic)") + elif self.is_bool_flag and self.secondary_opts: + # For boolean flags that have distinct True/False opts, + # use the opt without prefix instead of the value. + default_string = split_opt( + (self.opts if self.default else self.secondary_opts)[0] + )[1] + else: + default_string = str(default_value) + + if default_string: + extra.append(_("default: {default}").format(default=default_string)) + + if ( + isinstance(self.type, types._NumberRangeBase) + # skip count with default range type + and not (self.count and self.type.min == 0 and self.type.max is None) + ): + range_str = self.type._describe_range() + + if range_str: + extra.append(range_str) + + if self.required: + extra.append(_("required")) + + if extra: + extra_str = "; ".join(extra) + help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" + + return ("; " if any_prefix_is_slash else " / ").join(rv), help + + @typing.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... 
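+    # The @typing.overload stubs only narrow the return type for type
+    # checkers; the actual implementation follows them.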
+ + @typing.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + # If we're a non boolean flag our default is more complex because + # we need to look at all flags in the same group to figure out + # if we're the the default one in which case we return the flag + # value as default. + if self.is_flag and not self.is_bool_flag: + for param in ctx.command.params: + if param.name == self.name and param.default: + return param.flag_value # type: ignore + + return None + + return super().get_default(ctx, call=call) + + def prompt_for_value(self, ctx: Context) -> t.Any: + """This is an alternative flow that can be activated in the full + value processing if a value does not exist. It will prompt the + user until a valid value exists and then returns the processed + value as result. + """ + assert self.prompt is not None + + # Calculate the default before prompting anything to be stable. + default = self.get_default(ctx) + + # If this is a prompt for a flag we need to handle this + # differently. + if self.is_bool_flag: + return confirm(self.prompt, default) + + return prompt( + self.prompt, + default=default, + type=self.type, + hide_input=self.hide_input, + show_choices=self.show_choices, + confirmation_prompt=self.confirmation_prompt, + value_proc=lambda x: self.process_value(ctx, x), + ) + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + rv = super().resolve_envvar_value(ctx) + + if rv is not None: + return rv + + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + rv = os.environ.get(envvar) + + return rv + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is None: + return None + + value_depth = (self.nargs != 1) + bool(self.multiple) + + if value_depth > 0: + rv = self.type.split_envvar_value(rv) + + if self.multiple and self.nargs != 1: + rv = batch(rv, self.nargs) + + return rv + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, "Parameter"] + ) -> t.Tuple[t.Any, ParameterSource]: + value, source = super().consume_value(ctx, opts) + + # The parser will emit a sentinel value if the option can be + # given as a flag without a value. This is different from None + # to distinguish from the flag not being given at all. + if value is _flag_needs_value: + if self.prompt is not None and not ctx.resilient_parsing: + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + else: + value = self.flag_value + source = ParameterSource.COMMANDLINE + + elif ( + self.multiple + and value is not None + and any(v is _flag_needs_value for v in value) + ): + value = [self.flag_value if v is _flag_needs_value else v for v in value] + source = ParameterSource.COMMANDLINE + + # The value wasn't set, or used the param's default, prompt if + # prompting is enabled. + elif ( + source in {None, ParameterSource.DEFAULT} + and self.prompt is not None + and (self.required or self.prompt_required) + and not ctx.resilient_parsing + ): + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + + return value, source + + +class Argument(Parameter): + """Arguments are positional parameters to a command. 
They generally + provide fewer features than options but can have infinite ``nargs`` + and are required by default. + + All parameters are passed onwards to the parameter constructor. + """ + + param_type_name = "argument" + + def __init__( + self, + param_decls: t.Sequence[str], + required: t.Optional[bool] = None, + **attrs: t.Any, + ) -> None: + if required is None: + if attrs.get("default") is not None: + required = False + else: + required = attrs.get("nargs", 1) > 0 + + if "multiple" in attrs: + raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") + + super().__init__(param_decls, required=required, **attrs) + + if __debug__: + if self.default is not None and self.nargs == -1: + raise TypeError("'default' is not supported for nargs=-1.") + + @property + def human_readable_name(self) -> str: + if self.metavar is not None: + return self.metavar + return self.name.upper() # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + var = self.type.get_metavar(self) + if not var: + var = self.name.upper() # type: ignore + if not self.required: + var = f"[{var}]" + if self.nargs != 1: + var += "..." + return var + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + if not decls: + if not expose_value: + return None, [], [] + raise TypeError("Could not determine name for argument") + if len(decls) == 1: + name = arg = decls[0] + name = name.replace("-", "_").lower() + else: + raise TypeError( + "Arguments take exactly one parameter declaration, got" + f" {len(decls)}." + ) + return name, [arg], [] + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [self.make_metavar()] + + def get_error_hint(self, ctx: Context) -> str: + return f"'{self.make_metavar()}'" + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) diff --git a/venv/lib/python3.8/site-packages/click/decorators.py b/venv/lib/python3.8/site-packages/click/decorators.py new file mode 100644 index 0000000..f1cc005 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/decorators.py @@ -0,0 +1,436 @@ +import inspect +import types +import typing as t +from functools import update_wrapper +from gettext import gettext as _ + +from .core import Argument +from .core import Command +from .core import Context +from .core import Group +from .core import Option +from .core import Parameter +from .globals import get_current_context +from .utils import echo + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) +FC = t.TypeVar("FC", t.Callable[..., t.Any], Command) + + +def pass_context(f: F) -> F: + """Marks a callback as wanting to receive the current context + object as first argument. + """ + + def new_func(*args, **kwargs): # type: ignore + return f(get_current_context(), *args, **kwargs) + + return update_wrapper(t.cast(F, new_func), f) + + +def pass_obj(f: F) -> F: + """Similar to :func:`pass_context`, but only pass the object on the + context onwards (:attr:`Context.obj`). This is useful if that object + represents the state of a nested system. 
+ """ + + def new_func(*args, **kwargs): # type: ignore + return f(get_current_context().obj, *args, **kwargs) + + return update_wrapper(t.cast(F, new_func), f) + + +def make_pass_decorator( + object_type: t.Type, ensure: bool = False +) -> "t.Callable[[F], F]": + """Given an object type this creates a decorator that will work + similar to :func:`pass_obj` but instead of passing the object of the + current context, it will find the innermost context of type + :func:`object_type`. + + This generates a decorator that works roughly like this:: + + from functools import update_wrapper + + def decorator(f): + @pass_context + def new_func(ctx, *args, **kwargs): + obj = ctx.find_object(object_type) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + :param object_type: the type of the object to pass. + :param ensure: if set to `True`, a new object will be created and + remembered on the context if it's not there yet. + """ + + def decorator(f: F) -> F: + def new_func(*args, **kwargs): # type: ignore + ctx = get_current_context() + + if ensure: + obj = ctx.ensure_object(object_type) + else: + obj = ctx.find_object(object_type) + + if obj is None: + raise RuntimeError( + "Managed to invoke callback without a context" + f" object of type {object_type.__name__!r}" + " existing." + ) + + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(t.cast(F, new_func), f) + + return decorator + + +def pass_meta_key( + key: str, *, doc_description: t.Optional[str] = None +) -> "t.Callable[[F], F]": + """Create a decorator that passes a key from + :attr:`click.Context.meta` as the first argument to the decorated + function. + + :param key: Key in ``Context.meta`` to pass. + :param doc_description: Description of the object being passed, + inserted into the decorator's docstring. Defaults to "the 'key' + key from Context.meta". + + .. versionadded:: 8.0 + """ + + def decorator(f: F) -> F: + def new_func(*args, **kwargs): # type: ignore + ctx = get_current_context() + obj = ctx.meta[key] + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(t.cast(F, new_func), f) + + if doc_description is None: + doc_description = f"the {key!r} key from :attr:`click.Context.meta`" + + decorator.__doc__ = ( + f"Decorator that passes {doc_description} as the first argument" + " to the decorated function." + ) + return decorator + + +def _make_command( + f: F, + name: t.Optional[str], + attrs: t.MutableMapping[str, t.Any], + cls: t.Type[Command], +) -> Command: + if isinstance(f, Command): + raise TypeError("Attempted to convert a callback into a command twice.") + + try: + params = f.__click_params__ # type: ignore + params.reverse() + del f.__click_params__ # type: ignore + except AttributeError: + params = [] + + help = attrs.get("help") + + if help is None: + help = inspect.getdoc(f) + else: + help = inspect.cleandoc(help) + + attrs["help"] = help + return cls( + name=name or f.__name__.lower().replace("_", "-"), + callback=f, + params=params, + **attrs, + ) + + +def command( + name: t.Optional[str] = None, + cls: t.Optional[t.Type[Command]] = None, + **attrs: t.Any, +) -> t.Callable[[F], Command]: + r"""Creates a new :class:`Command` and uses the decorated function as + callback. This will also automatically attach all decorated + :func:`option`\s and :func:`argument`\s as parameters to the command. + + The name of the command defaults to the name of the function with + underscores replaced by dashes. 
If you want to change that, you can + pass the intended name as the first argument. + + All keyword arguments are forwarded to the underlying command class. + + Once decorated the function turns into a :class:`Command` instance + that can be invoked as a command line utility or be attached to a + command :class:`Group`. + + :param name: the name of the command. This defaults to the function + name with underscores replaced by dashes. + :param cls: the command class to instantiate. This defaults to + :class:`Command`. + """ + if cls is None: + cls = Command + + def decorator(f: t.Callable[..., t.Any]) -> Command: + cmd = _make_command(f, name, attrs, cls) # type: ignore + cmd.__doc__ = f.__doc__ + return cmd + + return decorator + + +def group(name: t.Optional[str] = None, **attrs: t.Any) -> t.Callable[[F], Group]: + """Creates a new :class:`Group` with a function as callback. This + works otherwise the same as :func:`command` just that the `cls` + parameter is set to :class:`Group`. + """ + attrs.setdefault("cls", Group) + return t.cast(Group, command(name, **attrs)) + + +def _param_memo(f: FC, param: Parameter) -> None: + if isinstance(f, Command): + f.params.append(param) + else: + if not hasattr(f, "__click_params__"): + f.__click_params__ = [] # type: ignore + + f.__click_params__.append(param) # type: ignore + + +def argument(*param_decls: str, **attrs: t.Any) -> t.Callable[[FC], FC]: + """Attaches an argument to the command. All positional arguments are + passed as parameter declarations to :class:`Argument`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Argument` instance manually + and attaching it to the :attr:`Command.params` list. + + :param cls: the argument class to instantiate. This defaults to + :class:`Argument`. + """ + + def decorator(f: FC) -> FC: + ArgumentClass = attrs.pop("cls", Argument) + _param_memo(f, ArgumentClass(param_decls, **attrs)) + return f + + return decorator + + +def option(*param_decls: str, **attrs: t.Any) -> t.Callable[[FC], FC]: + """Attaches an option to the command. All positional arguments are + passed as parameter declarations to :class:`Option`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Option` instance manually + and attaching it to the :attr:`Command.params` list. + + :param cls: the option class to instantiate. This defaults to + :class:`Option`. + """ + + def decorator(f: FC) -> FC: + # Issue 926, copy attrs, so pre-defined options can re-use the same cls= + option_attrs = attrs.copy() + + if "help" in option_attrs: + option_attrs["help"] = inspect.cleandoc(option_attrs["help"]) + OptionClass = option_attrs.pop("cls", Option) + _param_memo(f, OptionClass(param_decls, **option_attrs)) + return f + + return decorator + + +def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--yes`` option which shows a prompt before continuing if + not passed. If the prompt is declined, the program will exit. + + :param param_decls: One or more option names. Defaults to the single + value ``"--yes"``. + :param kwargs: Extra arguments are passed to :func:`option`. 
+ """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value: + ctx.abort() + + if not param_decls: + param_decls = ("--yes",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("callback", callback) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("prompt", "Do you want to continue?") + kwargs.setdefault("help", "Confirm the action without prompting.") + return option(*param_decls, **kwargs) + + +def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--password`` option which prompts for a password, hiding + input and asking to enter the value again for confirmation. + + :param param_decls: One or more option names. Defaults to the single + value ``"--password"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + if not param_decls: + param_decls = ("--password",) + + kwargs.setdefault("prompt", True) + kwargs.setdefault("confirmation_prompt", True) + kwargs.setdefault("hide_input", True) + return option(*param_decls, **kwargs) + + +def version_option( + version: t.Optional[str] = None, + *param_decls: str, + package_name: t.Optional[str] = None, + prog_name: t.Optional[str] = None, + message: t.Optional[str] = None, + **kwargs: t.Any, +) -> t.Callable[[FC], FC]: + """Add a ``--version`` option which immediately prints the version + number and exits the program. + + If ``version`` is not provided, Click will try to detect it using + :func:`importlib.metadata.version` to get the version for the + ``package_name``. On Python < 3.8, the ``importlib_metadata`` + backport must be installed. + + If ``package_name`` is not provided, Click will try to detect it by + inspecting the stack frames. This will be used to detect the + version, so it must match the name of the installed package. + + :param version: The version number to show. If not provided, Click + will try to detect it. + :param param_decls: One or more option names. Defaults to the single + value ``"--version"``. + :param package_name: The package name to detect the version from. If + not provided, Click will try to detect it. + :param prog_name: The name of the CLI to show in the message. If not + provided, it will be detected from the command. + :param message: The message to show. The values ``%(prog)s``, + ``%(package)s``, and ``%(version)s`` are available. Defaults to + ``"%(prog)s, version %(version)s"``. + :param kwargs: Extra arguments are passed to :func:`option`. + :raise RuntimeError: ``version`` could not be detected. + + .. versionchanged:: 8.0 + Add the ``package_name`` parameter, and the ``%(package)s`` + value for messages. + + .. versionchanged:: 8.0 + Use :mod:`importlib.metadata` instead of ``pkg_resources``. The + version is detected based on the package name, not the entry + point name. The Python package name must match the installed + package name, or be passed with ``package_name=``. 
+ """ + if message is None: + message = _("%(prog)s, version %(version)s") + + if version is None and package_name is None: + frame = inspect.currentframe() + f_back = frame.f_back if frame is not None else None + f_globals = f_back.f_globals if f_back is not None else None + # break reference cycle + # https://docs.python.org/3/library/inspect.html#the-interpreter-stack + del frame + + if f_globals is not None: + package_name = f_globals.get("__name__") + + if package_name == "__main__": + package_name = f_globals.get("__package__") + + if package_name: + package_name = package_name.partition(".")[0] + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + nonlocal prog_name + nonlocal version + + if prog_name is None: + prog_name = ctx.find_root().info_name + + if version is None and package_name is not None: + metadata: t.Optional[types.ModuleType] + + try: + from importlib import metadata # type: ignore + except ImportError: + # Python < 3.8 + import importlib_metadata as metadata # type: ignore + + try: + version = metadata.version(package_name) # type: ignore + except metadata.PackageNotFoundError: # type: ignore + raise RuntimeError( + f"{package_name!r} is not installed. Try passing" + " 'package_name' instead." + ) from None + + if version is None: + raise RuntimeError( + f"Could not determine the version for {package_name!r} automatically." + ) + + echo( + t.cast(str, message) + % {"prog": prog_name, "package": package_name, "version": version}, + color=ctx.color, + ) + ctx.exit() + + if not param_decls: + param_decls = ("--version",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show the version and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) + + +def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--help`` option which immediately prints the help page + and exits the program. + + This is usually unnecessary, as the ``--help`` option is added to + each command automatically unless ``add_help_option=False`` is + passed. + + :param param_decls: One or more option names. Defaults to the single + value ``"--help"``. + :param kwargs: Extra arguments are passed to :func:`option`. 
+ """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + if not param_decls: + param_decls = ("--help",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show this message and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) diff --git a/venv/lib/python3.8/site-packages/click/exceptions.py b/venv/lib/python3.8/site-packages/click/exceptions.py new file mode 100644 index 0000000..9e20b3e --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/exceptions.py @@ -0,0 +1,287 @@ +import os +import typing as t +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import get_text_stderr +from .utils import echo + +if t.TYPE_CHECKING: + from .core import Context + from .core import Parameter + + +def _join_param_hints( + param_hint: t.Optional[t.Union[t.Sequence[str], str]] +) -> t.Optional[str]: + if param_hint is not None and not isinstance(param_hint, str): + return " / ".join(repr(x) for x in param_hint) + + return param_hint + + +class ClickException(Exception): + """An exception that Click can handle and show to the user.""" + + #: The exit code for this exception. + exit_code = 1 + + def __init__(self, message: str) -> None: + super().__init__(message) + self.message = message + + def format_message(self) -> str: + return self.message + + def __str__(self) -> str: + return self.message + + def show(self, file: t.Optional[t.IO] = None) -> None: + if file is None: + file = get_text_stderr() + + echo(_("Error: {message}").format(message=self.format_message()), file=file) + + +class UsageError(ClickException): + """An internal exception that signals a usage error. This typically + aborts any further handling. + + :param message: the error message to display. + :param ctx: optionally the context that caused this error. Click will + fill in the context automatically in some situations. + """ + + exit_code = 2 + + def __init__(self, message: str, ctx: t.Optional["Context"] = None) -> None: + super().__init__(message) + self.ctx = ctx + self.cmd = self.ctx.command if self.ctx else None + + def show(self, file: t.Optional[t.IO] = None) -> None: + if file is None: + file = get_text_stderr() + color = None + hint = "" + if ( + self.ctx is not None + and self.ctx.command.get_help_option(self.ctx) is not None + ): + hint = _("Try '{command} {option}' for help.").format( + command=self.ctx.command_path, option=self.ctx.help_option_names[0] + ) + hint = f"{hint}\n" + if self.ctx is not None: + color = self.ctx.color + echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) + echo( + _("Error: {message}").format(message=self.format_message()), + file=file, + color=color, + ) + + +class BadParameter(UsageError): + """An exception that formats out a standardized error message for a + bad parameter. This is useful when thrown from a callback or type as + Click will attach contextual information to it (for instance, which + parameter it is). + + .. versionadded:: 2.0 + + :param param: the parameter object that caused this error. This can + be left out, and Click will attach this info itself + if possible. + :param param_hint: a string that shows up as parameter name. This + can be used as alternative to `param` in cases + where custom validation should happen. 
If it is + a string it's used as such, if it's a list then + each item is quoted and separated. + """ + + def __init__( + self, + message: str, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + ) -> None: + super().__init__(message, ctx) + self.param = param + self.param_hint = param_hint + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + return _("Invalid value: {message}").format(message=self.message) + + return _("Invalid value for {param_hint}: {message}").format( + param_hint=_join_param_hints(param_hint), message=self.message + ) + + +class MissingParameter(BadParameter): + """Raised if click required an option or argument but it was not + provided when invoking the script. + + .. versionadded:: 4.0 + + :param param_type: a string that indicates the type of the parameter. + The default is to inherit the parameter type from + the given `param`. Valid values are ``'parameter'``, + ``'option'`` or ``'argument'``. + """ + + def __init__( + self, + message: t.Optional[str] = None, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + param_type: t.Optional[str] = None, + ) -> None: + super().__init__(message or "", ctx, param, param_hint) + self.param_type = param_type + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint: t.Optional[str] = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + param_hint = None + + param_hint = _join_param_hints(param_hint) + param_hint = f" {param_hint}" if param_hint else "" + + param_type = self.param_type + if param_type is None and self.param is not None: + param_type = self.param.param_type_name + + msg = self.message + if self.param is not None: + msg_extra = self.param.type.get_missing_message(self.param) + if msg_extra: + if msg: + msg += f". {msg_extra}" + else: + msg = msg_extra + + msg = f" {msg}" if msg else "" + + # Translate param_type for known types. + if param_type == "argument": + missing = _("Missing argument") + elif param_type == "option": + missing = _("Missing option") + elif param_type == "parameter": + missing = _("Missing parameter") + else: + missing = _("Missing {param_type}").format(param_type=param_type) + + return f"{missing}{param_hint}.{msg}" + + def __str__(self) -> str: + if not self.message: + param_name = self.param.name if self.param else None + return _("Missing parameter: {param_name}").format(param_name=param_name) + else: + return self.message + + +class NoSuchOption(UsageError): + """Raised if click attempted to handle an option that does not + exist. + + .. 
versionadded:: 4.0 + """ + + def __init__( + self, + option_name: str, + message: t.Optional[str] = None, + possibilities: t.Optional[t.Sequence[str]] = None, + ctx: t.Optional["Context"] = None, + ) -> None: + if message is None: + message = _("No such option: {name}").format(name=option_name) + + super().__init__(message, ctx) + self.option_name = option_name + self.possibilities = possibilities + + def format_message(self) -> str: + if not self.possibilities: + return self.message + + possibility_str = ", ".join(sorted(self.possibilities)) + suggest = ngettext( + "Did you mean {possibility}?", + "(Possible options: {possibilities})", + len(self.possibilities), + ).format(possibility=possibility_str, possibilities=possibility_str) + return f"{self.message} {suggest}" + + +class BadOptionUsage(UsageError): + """Raised if an option is generally supplied but the use of the option + was incorrect. This is for instance raised if the number of arguments + for an option is not correct. + + .. versionadded:: 4.0 + + :param option_name: the name of the option being used incorrectly. + """ + + def __init__( + self, option_name: str, message: str, ctx: t.Optional["Context"] = None + ) -> None: + super().__init__(message, ctx) + self.option_name = option_name + + +class BadArgumentUsage(UsageError): + """Raised if an argument is generally supplied but the use of the argument + was incorrect. This is for instance raised if the number of values + for an argument is not correct. + + .. versionadded:: 6.0 + """ + + +class FileError(ClickException): + """Raised if a file cannot be opened.""" + + def __init__(self, filename: str, hint: t.Optional[str] = None) -> None: + if hint is None: + hint = _("unknown error") + + super().__init__(hint) + self.ui_filename = os.fsdecode(filename) + self.filename = filename + + def format_message(self) -> str: + return _("Could not open file {filename!r}: {message}").format( + filename=self.ui_filename, message=self.message + ) + + +class Abort(RuntimeError): + """An internal signalling exception that signals Click to abort.""" + + +class Exit(RuntimeError): + """An exception that indicates that the application should exit with some + status code. + + :param code: the status code to exit with. + """ + + __slots__ = ("exit_code",) + + def __init__(self, code: int = 0) -> None: + self.exit_code = code diff --git a/venv/lib/python3.8/site-packages/click/formatting.py b/venv/lib/python3.8/site-packages/click/formatting.py new file mode 100644 index 0000000..ddd2a2f --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/formatting.py @@ -0,0 +1,301 @@ +import typing as t +from contextlib import contextmanager +from gettext import gettext as _ + +from ._compat import term_len +from .parser import split_opt + +# Can force a width. This is used by the test system +FORCED_WIDTH: t.Optional[int] = None + + +def measure_table(rows: t.Iterable[t.Tuple[str, str]]) -> t.Tuple[int, ...]: + widths: t.Dict[int, int] = {} + + for row in rows: + for idx, col in enumerate(row): + widths[idx] = max(widths.get(idx, 0), term_len(col)) + + return tuple(y for x, y in sorted(widths.items())) + + +def iter_rows( + rows: t.Iterable[t.Tuple[str, str]], col_count: int +) -> t.Iterator[t.Tuple[str, ...]]: + for row in rows: + yield row + ("",) * (col_count - len(row)) + + +def wrap_text( + text: str, + width: int = 78, + initial_indent: str = "", + subsequent_indent: str = "", + preserve_paragraphs: bool = False, +) -> str: + """A helper function that intelligently wraps text. 
By default, it + assumes that it operates on a single paragraph of text but if the + `preserve_paragraphs` parameter is provided it will intelligently + handle paragraphs (defined by two empty lines). + + If paragraphs are handled, a paragraph can be prefixed with an empty + line containing the ``\\b`` character (``\\x08``) to indicate that + no rewrapping should happen in that block. + + :param text: the text that should be rewrapped. + :param width: the maximum width for the text. + :param initial_indent: the initial indent that should be placed on the + first line as a string. + :param subsequent_indent: the indent string that should be placed on + each consecutive line. + :param preserve_paragraphs: if this flag is set then the wrapping will + intelligently handle paragraphs. + """ + from ._textwrap import TextWrapper + + text = text.expandtabs() + wrapper = TextWrapper( + width, + initial_indent=initial_indent, + subsequent_indent=subsequent_indent, + replace_whitespace=False, + ) + if not preserve_paragraphs: + return wrapper.fill(text) + + p: t.List[t.Tuple[int, bool, str]] = [] + buf: t.List[str] = [] + indent = None + + def _flush_par() -> None: + if not buf: + return + if buf[0].strip() == "\b": + p.append((indent or 0, True, "\n".join(buf[1:]))) + else: + p.append((indent or 0, False, " ".join(buf))) + del buf[:] + + for line in text.splitlines(): + if not line: + _flush_par() + indent = None + else: + if indent is None: + orig_len = term_len(line) + line = line.lstrip() + indent = orig_len - term_len(line) + buf.append(line) + _flush_par() + + rv = [] + for indent, raw, text in p: + with wrapper.extra_indent(" " * indent): + if raw: + rv.append(wrapper.indent_only(text)) + else: + rv.append(wrapper.fill(text)) + + return "\n\n".join(rv) + + +class HelpFormatter: + """This class helps with formatting text-based help pages. It's + usually just needed for very special internal cases, but it's also + exposed so that developers can write their own fancy outputs. + + At present, it always writes into memory. + + :param indent_increment: the additional increment for each level. + :param width: the width for the text. This defaults to the terminal + width clamped to a maximum of 78. + """ + + def __init__( + self, + indent_increment: int = 2, + width: t.Optional[int] = None, + max_width: t.Optional[int] = None, + ) -> None: + import shutil + + self.indent_increment = indent_increment + if max_width is None: + max_width = 80 + if width is None: + width = FORCED_WIDTH + if width is None: + width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) + self.width = width + self.current_indent = 0 + self.buffer: t.List[str] = [] + + def write(self, string: str) -> None: + """Writes a unicode string into the internal buffer.""" + self.buffer.append(string) + + def indent(self) -> None: + """Increases the indentation.""" + self.current_indent += self.indent_increment + + def dedent(self) -> None: + """Decreases the indentation.""" + self.current_indent -= self.indent_increment + + def write_usage( + self, prog: str, args: str = "", prefix: t.Optional[str] = None + ) -> None: + """Writes a usage line into the buffer. + + :param prog: the program name. + :param args: whitespace separated list of arguments. + :param prefix: The prefix for the first line. Defaults to + ``"Usage: "``. 
+ """ + if prefix is None: + prefix = f"{_('Usage:')} " + + usage_prefix = f"{prefix:>{self.current_indent}}{prog} " + text_width = self.width - self.current_indent + + if text_width >= (term_len(usage_prefix) + 20): + # The arguments will fit to the right of the prefix. + indent = " " * term_len(usage_prefix) + self.write( + wrap_text( + args, + text_width, + initial_indent=usage_prefix, + subsequent_indent=indent, + ) + ) + else: + # The prefix is too long, put the arguments on the next line. + self.write(usage_prefix) + self.write("\n") + indent = " " * (max(self.current_indent, term_len(prefix)) + 4) + self.write( + wrap_text( + args, text_width, initial_indent=indent, subsequent_indent=indent + ) + ) + + self.write("\n") + + def write_heading(self, heading: str) -> None: + """Writes a heading into the buffer.""" + self.write(f"{'':>{self.current_indent}}{heading}:\n") + + def write_paragraph(self) -> None: + """Writes a paragraph into the buffer.""" + if self.buffer: + self.write("\n") + + def write_text(self, text: str) -> None: + """Writes re-indented text into the buffer. This rewraps and + preserves paragraphs. + """ + indent = " " * self.current_indent + self.write( + wrap_text( + text, + self.width, + initial_indent=indent, + subsequent_indent=indent, + preserve_paragraphs=True, + ) + ) + self.write("\n") + + def write_dl( + self, + rows: t.Sequence[t.Tuple[str, str]], + col_max: int = 30, + col_spacing: int = 2, + ) -> None: + """Writes a definition list into the buffer. This is how options + and commands are usually formatted. + + :param rows: a list of two item tuples for the terms and values. + :param col_max: the maximum width of the first column. + :param col_spacing: the number of spaces between the first and + second column. + """ + rows = list(rows) + widths = measure_table(rows) + if len(widths) != 2: + raise TypeError("Expected two columns for definition list") + + first_col = min(widths[0], col_max) + col_spacing + + for first, second in iter_rows(rows, len(widths)): + self.write(f"{'':>{self.current_indent}}{first}") + if not second: + self.write("\n") + continue + if term_len(first) <= first_col - col_spacing: + self.write(" " * (first_col - term_len(first))) + else: + self.write("\n") + self.write(" " * (first_col + self.current_indent)) + + text_width = max(self.width - first_col - 2, 10) + wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) + lines = wrapped_text.splitlines() + + if lines: + self.write(f"{lines[0]}\n") + + for line in lines[1:]: + self.write(f"{'':>{first_col + self.current_indent}}{line}\n") + else: + self.write("\n") + + @contextmanager + def section(self, name: str) -> t.Iterator[None]: + """Helpful context manager that writes a paragraph, a heading, + and the indents. + + :param name: the section name that is written as heading. 
+ """ + self.write_paragraph() + self.write_heading(name) + self.indent() + try: + yield + finally: + self.dedent() + + @contextmanager + def indentation(self) -> t.Iterator[None]: + """A context manager that increases the indentation.""" + self.indent() + try: + yield + finally: + self.dedent() + + def getvalue(self) -> str: + """Returns the buffer contents.""" + return "".join(self.buffer) + + +def join_options(options: t.Sequence[str]) -> t.Tuple[str, bool]: + """Given a list of option strings this joins them in the most appropriate + way and returns them in the form ``(formatted_string, + any_prefix_is_slash)`` where the second item in the tuple is a flag that + indicates if any of the option prefixes was a slash. + """ + rv = [] + any_prefix_is_slash = False + + for opt in options: + prefix = split_opt(opt)[0] + + if prefix == "/": + any_prefix_is_slash = True + + rv.append((len(prefix), opt)) + + rv.sort(key=lambda x: x[0]) + return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/venv/lib/python3.8/site-packages/click/globals.py b/venv/lib/python3.8/site-packages/click/globals.py new file mode 100644 index 0000000..a7b0c93 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/globals.py @@ -0,0 +1,69 @@ +import typing +import typing as t +from threading import local + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Context + +_local = local() + + +@typing.overload +def get_current_context(silent: "te.Literal[False]" = False) -> "Context": + ... + + +@typing.overload +def get_current_context(silent: bool = ...) -> t.Optional["Context"]: + ... + + +def get_current_context(silent: bool = False) -> t.Optional["Context"]: + """Returns the current click context. This can be used as a way to + access the current context object from anywhere. This is a more implicit + alternative to the :func:`pass_context` decorator. This function is + primarily useful for helpers such as :func:`echo` which might be + interested in changing its behavior based on the current context. + + To push the current context, :meth:`Context.scope` can be used. + + .. versionadded:: 5.0 + + :param silent: if set to `True` the return value is `None` if no context + is available. The default behavior is to raise a + :exc:`RuntimeError`. + """ + try: + return t.cast("Context", _local.stack[-1]) + except (AttributeError, IndexError) as e: + if not silent: + raise RuntimeError("There is no active click context.") from e + + return None + + +def push_context(ctx: "Context") -> None: + """Pushes a new context to the current stack.""" + _local.__dict__.setdefault("stack", []).append(ctx) + + +def pop_context() -> None: + """Removes the top level from the stack.""" + _local.stack.pop() + + +def resolve_color_default(color: t.Optional[bool] = None) -> t.Optional[bool]: + """Internal helper to get the default value of the color flag. If a + value is passed it's returned unchanged, otherwise it's looked up from + the current context. 
+ """ + if color is not None: + return color + + ctx = get_current_context(silent=True) + + if ctx is not None: + return ctx.color + + return None diff --git a/venv/lib/python3.8/site-packages/click/parser.py b/venv/lib/python3.8/site-packages/click/parser.py new file mode 100644 index 0000000..2d5a2ed --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/parser.py @@ -0,0 +1,529 @@ +""" +This module started out as largely a copy paste from the stdlib's +optparse module with the features removed that we do not need from +optparse because we implement them in Click on a higher level (for +instance type handling, help formatting and a lot more). + +The plan is to remove more and more from here over time. + +The reason this is a different module and not optparse from the stdlib +is that there are differences in 2.x and 3.x about the error messages +generated and optparse in the stdlib uses gettext for no good reason +and might cause us issues. + +Click uses parts of optparse written by Gregory P. Ward and maintained +by the Python Software Foundation. This is limited to code in parser.py. + +Copyright 2001-2006 Gregory P. Ward. All rights reserved. +Copyright 2002-2006 Python Software Foundation. All rights reserved. +""" +# This code uses parts of optparse written by Gregory P. Ward and +# maintained by the Python Software Foundation. +# Copyright 2001-2006 Gregory P. Ward +# Copyright 2002-2006 Python Software Foundation +import typing as t +from collections import deque +from gettext import gettext as _ +from gettext import ngettext + +from .exceptions import BadArgumentUsage +from .exceptions import BadOptionUsage +from .exceptions import NoSuchOption +from .exceptions import UsageError + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Argument as CoreArgument + from .core import Context + from .core import Option as CoreOption + from .core import Parameter as CoreParameter + +V = t.TypeVar("V") + +# Sentinel value that indicates an option was passed as a flag without a +# value but is not a flag option. Option.consume_value uses this to +# prompt or use the flag_value. +_flag_needs_value = object() + + +def _unpack_args( + args: t.Sequence[str], nargs_spec: t.Sequence[int] +) -> t.Tuple[t.Sequence[t.Union[str, t.Sequence[t.Optional[str]], None]], t.List[str]]: + """Given an iterable of arguments and an iterable of nargs specifications, + it returns a tuple with all the unpacked arguments at the first index + and all remaining arguments as the second. + + The nargs specification is the number of arguments that should be consumed + or `-1` to indicate that this position should eat up all the remainders. + + Missing items are filled with `None`. + """ + args = deque(args) + nargs_spec = deque(nargs_spec) + rv: t.List[t.Union[str, t.Tuple[t.Optional[str], ...], None]] = [] + spos: t.Optional[int] = None + + def _fetch(c: "te.Deque[V]") -> t.Optional[V]: + try: + if spos is None: + return c.popleft() + else: + return c.pop() + except IndexError: + return None + + while nargs_spec: + nargs = _fetch(nargs_spec) + + if nargs is None: + continue + + if nargs == 1: + rv.append(_fetch(args)) + elif nargs > 1: + x = [_fetch(args) for _ in range(nargs)] + + # If we're reversed, we're pulling in the arguments in reverse, + # so we need to turn them around. 
+ if spos is not None: + x.reverse() + + rv.append(tuple(x)) + elif nargs < 0: + if spos is not None: + raise TypeError("Cannot have two nargs < 0") + + spos = len(rv) + rv.append(None) + + # spos is the position of the wildcard (star). If it's not `None`, + # we fill it with the remainder. + if spos is not None: + rv[spos] = tuple(args) + args = [] + rv[spos + 1 :] = reversed(rv[spos + 1 :]) + + return tuple(rv), list(args) + + +def split_opt(opt: str) -> t.Tuple[str, str]: + first = opt[:1] + if first.isalnum(): + return "", opt + if opt[1:2] == first: + return opt[:2], opt[2:] + return first, opt[1:] + + +def normalize_opt(opt: str, ctx: t.Optional["Context"]) -> str: + if ctx is None or ctx.token_normalize_func is None: + return opt + prefix, opt = split_opt(opt) + return f"{prefix}{ctx.token_normalize_func(opt)}" + + +def split_arg_string(string: str) -> t.List[str]: + """Split an argument string as with :func:`shlex.split`, but don't + fail if the string is incomplete. Ignores a missing closing quote or + incomplete escape sequence and uses the partial token as-is. + + .. code-block:: python + + split_arg_string("example 'my file") + ["example", "my file"] + + split_arg_string("example my\\") + ["example", "my"] + + :param string: String to split. + """ + import shlex + + lex = shlex.shlex(string, posix=True) + lex.whitespace_split = True + lex.commenters = "" + out = [] + + try: + for token in lex: + out.append(token) + except ValueError: + # Raised when end-of-string is reached in an invalid state. Use + # the partial token as-is. The quote or escape character is in + # lex.state, not lex.token. + out.append(lex.token) + + return out + + +class Option: + def __init__( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ): + self._short_opts = [] + self._long_opts = [] + self.prefixes = set() + + for opt in opts: + prefix, value = split_opt(opt) + if not prefix: + raise ValueError(f"Invalid start character for option ({opt})") + self.prefixes.add(prefix[0]) + if len(prefix) == 1 and len(value) == 1: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + self.prefixes.add(prefix) + + if action is None: + action = "store" + + self.dest = dest + self.action = action + self.nargs = nargs + self.const = const + self.obj = obj + + @property + def takes_value(self) -> bool: + return self.action in ("store", "append") + + def process(self, value: str, state: "ParsingState") -> None: + if self.action == "store": + state.opts[self.dest] = value # type: ignore + elif self.action == "store_const": + state.opts[self.dest] = self.const # type: ignore + elif self.action == "append": + state.opts.setdefault(self.dest, []).append(value) # type: ignore + elif self.action == "append_const": + state.opts.setdefault(self.dest, []).append(self.const) # type: ignore + elif self.action == "count": + state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore + else: + raise ValueError(f"unknown action '{self.action}'") + state.order.append(self.obj) + + +class Argument: + def __init__(self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1): + self.dest = dest + self.nargs = nargs + self.obj = obj + + def process( + self, + value: t.Union[t.Optional[str], t.Sequence[t.Optional[str]]], + state: "ParsingState", + ) -> None: + if self.nargs > 1: + assert value is not None + holes = sum(1 for x in value if x is None) + if holes == len(value): + value = None + 
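+            # Some, but not all, of the expected values were provided.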
elif holes != 0: + raise BadArgumentUsage( + _("Argument {name!r} takes {nargs} values.").format( + name=self.dest, nargs=self.nargs + ) + ) + + if self.nargs == -1 and self.obj.envvar is not None and value == (): + # Replace empty tuple with None so that a value from the + # environment may be tried. + value = None + + state.opts[self.dest] = value # type: ignore + state.order.append(self.obj) + + +class ParsingState: + def __init__(self, rargs: t.List[str]) -> None: + self.opts: t.Dict[str, t.Any] = {} + self.largs: t.List[str] = [] + self.rargs = rargs + self.order: t.List["CoreParameter"] = [] + + +class OptionParser: + """The option parser is an internal class that is ultimately used to + parse options and arguments. It's modelled after optparse and brings + a similar but vastly simplified API. It should generally not be used + directly as the high level Click classes wrap it for you. + + It's not nearly as extensible as optparse or argparse as it does not + implement features that are implemented on a higher level (such as + types or defaults). + + :param ctx: optionally the :class:`~click.Context` where this parser + should go with. + """ + + def __init__(self, ctx: t.Optional["Context"] = None) -> None: + #: The :class:`~click.Context` for this parser. This might be + #: `None` for some advanced use cases. + self.ctx = ctx + #: This controls how the parser deals with interspersed arguments. + #: If this is set to `False`, the parser will stop on the first + #: non-option. Click uses this to implement nested subcommands + #: safely. + self.allow_interspersed_args = True + #: This tells the parser how to deal with unknown options. By + #: default it will error out (which is sensible), but there is a + #: second mode where it will ignore it and continue processing + #: after shifting all the unknown options into the resulting args. + self.ignore_unknown_options = False + + if ctx is not None: + self.allow_interspersed_args = ctx.allow_interspersed_args + self.ignore_unknown_options = ctx.ignore_unknown_options + + self._short_opt: t.Dict[str, Option] = {} + self._long_opt: t.Dict[str, Option] = {} + self._opt_prefixes = {"-", "--"} + self._args: t.List[Argument] = [] + + def add_option( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ) -> None: + """Adds a new option named `dest` to the parser. The destination + is not inferred (unlike with optparse) and needs to be explicitly + provided. Action can be any of ``store``, ``store_const``, + ``append``, ``append_const`` or ``count``. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + opts = [normalize_opt(opt, self.ctx) for opt in opts] + option = Option(obj, opts, dest, action=action, nargs=nargs, const=const) + self._opt_prefixes.update(option.prefixes) + for opt in option._short_opts: + self._short_opt[opt] = option + for opt in option._long_opts: + self._long_opt[opt] = option + + def add_argument( + self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1 + ) -> None: + """Adds a positional argument named `dest` to the parser. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. 
+ """ + self._args.append(Argument(obj, dest=dest, nargs=nargs)) + + def parse_args( + self, args: t.List[str] + ) -> t.Tuple[t.Dict[str, t.Any], t.List[str], t.List["CoreParameter"]]: + """Parses positional arguments and returns ``(values, args, order)`` + for the parsed options and arguments as well as the leftover + arguments if there are any. The order is a list of objects as they + appear on the command line. If arguments appear multiple times they + will be memorized multiple times as well. + """ + state = ParsingState(args) + try: + self._process_args_for_options(state) + self._process_args_for_args(state) + except UsageError: + if self.ctx is None or not self.ctx.resilient_parsing: + raise + return state.opts, state.largs, state.order + + def _process_args_for_args(self, state: ParsingState) -> None: + pargs, args = _unpack_args( + state.largs + state.rargs, [x.nargs for x in self._args] + ) + + for idx, arg in enumerate(self._args): + arg.process(pargs[idx], state) + + state.largs = args + state.rargs = [] + + def _process_args_for_options(self, state: ParsingState) -> None: + while state.rargs: + arg = state.rargs.pop(0) + arglen = len(arg) + # Double dashes always handled explicitly regardless of what + # prefixes are valid. + if arg == "--": + return + elif arg[:1] in self._opt_prefixes and arglen > 1: + self._process_opts(arg, state) + elif self.allow_interspersed_args: + state.largs.append(arg) + else: + state.rargs.insert(0, arg) + return + + # Say this is the original argument list: + # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] + # ^ + # (we are about to process arg(i)). + # + # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of + # [arg0, ..., arg(i-1)] (any options and their arguments will have + # been removed from largs). + # + # The while loop will usually consume 1 or more arguments per pass. + # If it consumes 1 (eg. arg is an option that takes no arguments), + # then after _process_arg() is done the situation is: + # + # largs = subset of [arg0, ..., arg(i)] + # rargs = [arg(i+1), ..., arg(N-1)] + # + # If allow_interspersed_args is false, largs will always be + # *empty* -- still a subset of [arg0, ..., arg(i-1)], but + # not a very interesting subset! + + def _match_long_opt( + self, opt: str, explicit_value: t.Optional[str], state: ParsingState + ) -> None: + if opt not in self._long_opt: + from difflib import get_close_matches + + possibilities = get_close_matches(opt, self._long_opt) + raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) + + option = self._long_opt[opt] + if option.takes_value: + # At this point it's safe to modify rargs by injecting the + # explicit value, because no exception is raised in this + # branch. This means that the inserted value will be fully + # consumed. 
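+            # Illustrative example (added comment, not from upstream): for
+            # ``--name=Click``, _process_opts() has already split off
+            # explicit_value="Click"; re-inserting it into rargs lets the
+            # regular nargs handling in _get_value_from_state() consume it
+            # like any other value.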
+ if explicit_value is not None: + state.rargs.insert(0, explicit_value) + + value = self._get_value_from_state(opt, option, state) + + elif explicit_value is not None: + raise BadOptionUsage( + opt, _("Option {name!r} does not take a value.").format(name=opt) + ) + + else: + value = None + + option.process(value, state) + + def _match_short_opt(self, arg: str, state: ParsingState) -> None: + stop = False + i = 1 + prefix = arg[0] + unknown_options = [] + + for ch in arg[1:]: + opt = normalize_opt(f"{prefix}{ch}", self.ctx) + option = self._short_opt.get(opt) + i += 1 + + if not option: + if self.ignore_unknown_options: + unknown_options.append(ch) + continue + raise NoSuchOption(opt, ctx=self.ctx) + if option.takes_value: + # Any characters left in arg? Pretend they're the + # next arg, and stop consuming characters of arg. + if i < len(arg): + state.rargs.insert(0, arg[i:]) + stop = True + + value = self._get_value_from_state(opt, option, state) + + else: + value = None + + option.process(value, state) + + if stop: + break + + # If we got any unknown options we re-combinate the string of the + # remaining options and re-attach the prefix, then report that + # to the state as new larg. This way there is basic combinatorics + # that can be achieved while still ignoring unknown arguments. + if self.ignore_unknown_options and unknown_options: + state.largs.append(f"{prefix}{''.join(unknown_options)}") + + def _get_value_from_state( + self, option_name: str, option: Option, state: ParsingState + ) -> t.Any: + nargs = option.nargs + + if len(state.rargs) < nargs: + if option.obj._flag_needs_value: + # Option allows omitting the value. + value = _flag_needs_value + else: + raise BadOptionUsage( + option_name, + ngettext( + "Option {name!r} requires an argument.", + "Option {name!r} requires {nargs} arguments.", + nargs, + ).format(name=option_name, nargs=nargs), + ) + elif nargs == 1: + next_rarg = state.rargs[0] + + if ( + option.obj._flag_needs_value + and isinstance(next_rarg, str) + and next_rarg[:1] in self._opt_prefixes + and len(next_rarg) > 1 + ): + # The next arg looks like the start of an option, don't + # use it as the value if omitting the value is allowed. + value = _flag_needs_value + else: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + return value + + def _process_opts(self, arg: str, state: ParsingState) -> None: + explicit_value = None + # Long option handling happens in two parts. The first part is + # supporting explicitly attached values. In any case, we will try + # to long match the option first. + if "=" in arg: + long_opt, explicit_value = arg.split("=", 1) + else: + long_opt = arg + norm_long_opt = normalize_opt(long_opt, self.ctx) + + # At this point we will match the (assumed) long option through + # the long option matching code. Note that this allows options + # like "-foo" to be matched as long options. + try: + self._match_long_opt(norm_long_opt, explicit_value, state) + except NoSuchOption: + # At this point the long option matching failed, and we need + # to try with short options. However there is a special rule + # which says, that if we have a two character options prefix + # (applies to "--foo" for instance), we do not dispatch to the + # short option code and will instead raise the no option + # error. 
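+            # Illustrative example (added comment, not from upstream): an
+            # unknown "-abc" is retried below as the short options "-a",
+            # "-b", "-c", while an unknown "--abc" is not, because its
+            # two-character "--" prefix is a registered option prefix and
+            # the error (or pass-through) applies to the whole token.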
+ if arg[:2] not in self._opt_prefixes: + self._match_short_opt(arg, state) + return + + if not self.ignore_unknown_options: + raise + + state.largs.append(arg) diff --git a/venv/lib/python3.8/site-packages/click/py.typed b/venv/lib/python3.8/site-packages/click/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/click/shell_completion.py b/venv/lib/python3.8/site-packages/click/shell_completion.py new file mode 100644 index 0000000..cad080d --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/shell_completion.py @@ -0,0 +1,581 @@ +import os +import re +import typing as t +from gettext import gettext as _ + +from .core import Argument +from .core import BaseCommand +from .core import Context +from .core import MultiCommand +from .core import Option +from .core import Parameter +from .core import ParameterSource +from .parser import split_arg_string +from .utils import echo + + +def shell_complete( + cli: BaseCommand, + ctx_args: t.Dict[str, t.Any], + prog_name: str, + complete_var: str, + instruction: str, +) -> int: + """Perform shell completion for the given CLI program. + + :param cli: Command being called. + :param ctx_args: Extra arguments to pass to + ``cli.make_context``. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + :param instruction: Value of ``complete_var`` with the completion + instruction and shell, in the form ``instruction_shell``. + :return: Status code to exit with. + """ + shell, _, instruction = instruction.partition("_") + comp_cls = get_completion_class(shell) + + if comp_cls is None: + return 1 + + comp = comp_cls(cli, ctx_args, prog_name, complete_var) + + if instruction == "source": + echo(comp.source()) + return 0 + + if instruction == "complete": + echo(comp.complete()) + return 0 + + return 1 + + +class CompletionItem: + """Represents a completion value and metadata about the value. The + default metadata is ``type`` to indicate special shell handling, + and ``help`` if a shell supports showing a help string next to the + value. + + Arbitrary parameters can be passed when creating the object, and + accessed using ``item.attr``. If an attribute wasn't passed, + accessing it returns ``None``. + + :param value: The completion suggestion. + :param type: Tells the shell script to provide special completion + support for the type. Click uses ``"dir"`` and ``"file"``. + :param help: String shown next to the value if supported. + :param kwargs: Arbitrary metadata. The built-in implementations + don't use this, but custom type completions paired with custom + shell support could use it. + """ + + __slots__ = ("value", "type", "help", "_info") + + def __init__( + self, + value: t.Any, + type: str = "plain", + help: t.Optional[str] = None, + **kwargs: t.Any, + ) -> None: + self.value = value + self.type = type + self.help = help + self._info = kwargs + + def __getattr__(self, name: str) -> t.Any: + return self._info.get(name) + + +# Only Bash >= 4.4 has the nosort option. 
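+# The bash template below therefore registers the completion function with
+# ``-o nosort``.  As an illustrative sketch (added here for context, not part
+# of the upstream module), a custom completer plugs into this machinery by
+# returning CompletionItem objects from a parameter's ``shell_complete``
+# callback; the names below are hypothetical:
+#
+#     import click
+#     from click.shell_completion import CompletionItem
+#
+#     def complete_env(ctx, param, incomplete):
+#         return [CompletionItem(name, help="deployment target")
+#                 for name in ("dev", "staging", "prod")
+#                 if name.startswith(incomplete)]
+#
+#     @click.command()
+#     @click.argument("env", shell_complete=complete_env)
+#     def deploy(env):
+#         click.echo(env)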
+_SOURCE_BASH = """\ +%(complete_func)s() { + local IFS=$'\\n' + local response + + response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ +%(complete_var)s=bash_complete $1) + + for completion in $response; do + IFS=',' read type value <<< "$completion" + + if [[ $type == 'dir' ]]; then + COMREPLY=() + compopt -o dirnames + elif [[ $type == 'file' ]]; then + COMREPLY=() + compopt -o default + elif [[ $type == 'plain' ]]; then + COMPREPLY+=($value) + fi + done + + return 0 +} + +%(complete_func)s_setup() { + complete -o nosort -F %(complete_func)s %(prog_name)s +} + +%(complete_func)s_setup; +""" + +_SOURCE_ZSH = """\ +#compdef %(prog_name)s + +%(complete_func)s() { + local -a completions + local -a completions_with_descriptions + local -a response + (( ! $+commands[%(prog_name)s] )) && return 1 + + response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ +%(complete_var)s=zsh_complete %(prog_name)s)}") + + for type key descr in ${response}; do + if [[ "$type" == "plain" ]]; then + if [[ "$descr" == "_" ]]; then + completions+=("$key") + else + completions_with_descriptions+=("$key":"$descr") + fi + elif [[ "$type" == "dir" ]]; then + _path_files -/ + elif [[ "$type" == "file" ]]; then + _path_files -f + fi + done + + if [ -n "$completions_with_descriptions" ]; then + _describe -V unsorted completions_with_descriptions -U + fi + + if [ -n "$completions" ]; then + compadd -U -V unsorted -a completions + fi +} + +compdef %(complete_func)s %(prog_name)s; +""" + +_SOURCE_FISH = """\ +function %(complete_func)s; + set -l response; + + for value in (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ +COMP_CWORD=(commandline -t) %(prog_name)s); + set response $response $value; + end; + + for completion in $response; + set -l metadata (string split "," $completion); + + if test $metadata[1] = "dir"; + __fish_complete_directories $metadata[2]; + else if test $metadata[1] = "file"; + __fish_complete_path $metadata[2]; + else if test $metadata[1] = "plain"; + echo $metadata[2]; + end; + end; +end; + +complete --no-files --command %(prog_name)s --arguments \ +"(%(complete_func)s)"; +""" + + +class ShellComplete: + """Base class for providing shell completion support. A subclass for + a given shell will override attributes and methods to implement the + completion instructions (``source`` and ``complete``). + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + + .. versionadded:: 8.0 + """ + + name: t.ClassVar[str] + """Name to register the shell as with :func:`add_completion_class`. + This is used in completion instructions (``{name}_source`` and + ``{name}_complete``). + """ + + source_template: t.ClassVar[str] + """Completion script template formatted by :meth:`source`. This must + be provided by subclasses. + """ + + def __init__( + self, + cli: BaseCommand, + ctx_args: t.Dict[str, t.Any], + prog_name: str, + complete_var: str, + ) -> None: + self.cli = cli + self.ctx_args = ctx_args + self.prog_name = prog_name + self.complete_var = complete_var + + @property + def func_name(self) -> str: + """The name of the shell function defined by the completion + script. + """ + safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), re.ASCII) + return f"_{safe_name}_completion" + + def source_vars(self) -> t.Dict[str, t.Any]: + """Vars for formatting :attr:`source_template`. 
+ + By default this provides ``complete_func``, ``complete_var``, + and ``prog_name``. + """ + return { + "complete_func": self.func_name, + "complete_var": self.complete_var, + "prog_name": self.prog_name, + } + + def source(self) -> str: + """Produce the shell script that defines the completion + function. By default this ``%``-style formats + :attr:`source_template` with the dict returned by + :meth:`source_vars`. + """ + return self.source_template % self.source_vars() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + """Use the env vars defined by the shell script to return a + tuple of ``args, incomplete``. This must be implemented by + subclasses. + """ + raise NotImplementedError + + def get_completions( + self, args: t.List[str], incomplete: str + ) -> t.List[CompletionItem]: + """Determine the context and last complete command or parameter + from the complete args. Call that object's ``shell_complete`` + method to get the completions for the incomplete value. + + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) + obj, incomplete = _resolve_incomplete(ctx, args, incomplete) + return obj.shell_complete(ctx, incomplete) + + def format_completion(self, item: CompletionItem) -> str: + """Format a completion item into the form recognized by the + shell script. This must be implemented by subclasses. + + :param item: Completion item to format. + """ + raise NotImplementedError + + def complete(self) -> str: + """Produce the completion data to send back to the shell. + + By default this calls :meth:`get_completion_args`, gets the + completions, then calls :meth:`format_completion` for each + completion. + """ + args, incomplete = self.get_completion_args() + completions = self.get_completions(args, incomplete) + out = [self.format_completion(item) for item in completions] + return "\n".join(out) + + +class BashComplete(ShellComplete): + """Shell completion for Bash.""" + + name = "bash" + source_template = _SOURCE_BASH + + def _check_version(self) -> None: + import subprocess + + output = subprocess.run( + ["bash", "-c", "echo ${BASH_VERSION}"], stdout=subprocess.PIPE + ) + match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) + + if match is not None: + major, minor = match.groups() + + if major < "4" or major == "4" and minor < "4": + raise RuntimeError( + _( + "Shell completion is not supported for Bash" + " versions older than 4.4." 
+ ) + ) + else: + raise RuntimeError( + _("Couldn't detect Bash version, shell completion is not supported.") + ) + + def source(self) -> str: + self._check_version() + return super().source() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type},{item.value}" + + +class ZshComplete(ShellComplete): + """Shell completion for Zsh.""" + + name = "zsh" + source_template = _SOURCE_ZSH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" + + +class FishComplete(ShellComplete): + """Shell completion for Fish.""" + + name = "fish" + source_template = _SOURCE_FISH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + incomplete = os.environ["COMP_CWORD"] + args = cwords[1:] + + # Fish stores the partial word in both COMP_WORDS and + # COMP_CWORD, remove it from complete args. + if incomplete and args and args[-1] == incomplete: + args.pop() + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + if item.help: + return f"{item.type},{item.value}\t{item.help}" + + return f"{item.type},{item.value}" + + +_available_shells: t.Dict[str, t.Type[ShellComplete]] = { + "bash": BashComplete, + "fish": FishComplete, + "zsh": ZshComplete, +} + + +def add_completion_class( + cls: t.Type[ShellComplete], name: t.Optional[str] = None +) -> None: + """Register a :class:`ShellComplete` subclass under the given name. + The name will be provided by the completion instruction environment + variable during completion. + + :param cls: The completion class that will handle completion for the + shell. + :param name: Name to register the class under. Defaults to the + class's ``name`` attribute. + """ + if name is None: + name = cls.name + + _available_shells[name] = cls + + +def get_completion_class(shell: str) -> t.Optional[t.Type[ShellComplete]]: + """Look up a registered :class:`ShellComplete` subclass by the name + provided by the completion instruction environment variable. If the + name isn't registered, returns ``None``. + + :param shell: Name the class is registered under. + """ + return _available_shells.get(shell) + + +def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: + """Determine if the given parameter is an argument that can still + accept values. + + :param ctx: Invocation context for the command represented by the + parsed complete args. + :param param: Argument object being checked. 
+ """ + if not isinstance(param, Argument): + return False + + assert param.name is not None + value = ctx.params[param.name] + return ( + param.nargs == -1 + or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE + or ( + param.nargs > 1 + and isinstance(value, (tuple, list)) + and len(value) < param.nargs + ) + ) + + +def _start_of_option(value: str) -> bool: + """Check if the value looks like the start of an option.""" + if not value: + return False + + c = value[0] + # Allow "/" since that starts a path. + return not c.isalnum() and c != "/" + + +def _is_incomplete_option(args: t.List[str], param: Parameter) -> bool: + """Determine if the given parameter is an option that needs a value. + + :param args: List of complete args before the incomplete value. + :param param: Option object being checked. + """ + if not isinstance(param, Option): + return False + + if param.is_flag: + return False + + last_option = None + + for index, arg in enumerate(reversed(args)): + if index + 1 > param.nargs: + break + + if _start_of_option(arg): + last_option = arg + + return last_option is not None and last_option in param.opts + + +def _resolve_context( + cli: BaseCommand, ctx_args: t.Dict[str, t.Any], prog_name: str, args: t.List[str] +) -> Context: + """Produce the context hierarchy starting with the command and + traversing the complete arguments. This only follows the commands, + it doesn't trigger input prompts or callbacks. + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param args: List of complete args before the incomplete value. + """ + ctx_args["resilient_parsing"] = True + ctx = cli.make_context(prog_name, args.copy(), **ctx_args) + args = ctx.protected_args + ctx.args + + while args: + command = ctx.command + + if isinstance(command, MultiCommand): + if not command.chain: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + ctx = cmd.make_context(name, args, parent=ctx, resilient_parsing=True) + args = ctx.protected_args + ctx.args + else: + while args: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + sub_ctx = cmd.make_context( + name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + resilient_parsing=True, + ) + args = sub_ctx.args + + ctx = sub_ctx + args = [*sub_ctx.protected_args, *sub_ctx.args] + else: + break + + return ctx + + +def _resolve_incomplete( + ctx: Context, args: t.List[str], incomplete: str +) -> t.Tuple[t.Union[BaseCommand, Parameter], str]: + """Find the Click object that will handle the completion of the + incomplete value. Return the object and the incomplete value. + + :param ctx: Invocation context for the command represented by + the parsed complete args. + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + # Different shells treat an "=" between a long option name and + # value differently. Might keep the value joined, return the "=" + # as a separate item, or return the split name and value. Always + # split and discard the "=" to make completion easier. + if incomplete == "=": + incomplete = "" + elif "=" in incomplete and _start_of_option(incomplete): + name, _, incomplete = incomplete.partition("=") + args.append(name) + + # The "--" marker tells Click to stop treating values as options + # even if they start with the option character. 
If it hasn't been + # given and the incomplete arg looks like an option, the current + # command will provide option name completions. + if "--" not in args and _start_of_option(incomplete): + return ctx.command, incomplete + + params = ctx.command.get_params(ctx) + + # If the last complete arg is an option name with an incomplete + # value, the option will provide value completions. + for param in params: + if _is_incomplete_option(args, param): + return param, incomplete + + # It's not an option name or value. The first argument without a + # parsed value will provide value completions. + for param in params: + if _is_incomplete_argument(ctx, param): + return param, incomplete + + # There were no unparsed arguments, the command may be a group that + # will provide command name completions. + return ctx.command, incomplete diff --git a/venv/lib/python3.8/site-packages/click/termui.py b/venv/lib/python3.8/site-packages/click/termui.py new file mode 100644 index 0000000..cf8d5f1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/termui.py @@ -0,0 +1,809 @@ +import inspect +import io +import itertools +import os +import sys +import typing +import typing as t +from gettext import gettext as _ + +from ._compat import isatty +from ._compat import strip_ansi +from ._compat import WIN +from .exceptions import Abort +from .exceptions import UsageError +from .globals import resolve_color_default +from .types import Choice +from .types import convert_type +from .types import ParamType +from .utils import echo +from .utils import LazyFile + +if t.TYPE_CHECKING: + from ._termui_impl import ProgressBar + +V = t.TypeVar("V") + +# The prompt functions to use. The doc tools currently override these +# functions to customize how they work. +visible_prompt_func: t.Callable[[str], str] = input + +_ansi_colors = { + "black": 30, + "red": 31, + "green": 32, + "yellow": 33, + "blue": 34, + "magenta": 35, + "cyan": 36, + "white": 37, + "reset": 39, + "bright_black": 90, + "bright_red": 91, + "bright_green": 92, + "bright_yellow": 93, + "bright_blue": 94, + "bright_magenta": 95, + "bright_cyan": 96, + "bright_white": 97, +} +_ansi_reset_all = "\033[0m" + + +def hidden_prompt_func(prompt: str) -> str: + import getpass + + return getpass.getpass(prompt) + + +def _build_prompt( + text: str, + suffix: str, + show_default: bool = False, + default: t.Optional[t.Any] = None, + show_choices: bool = True, + type: t.Optional[ParamType] = None, +) -> str: + prompt = text + if type is not None and show_choices and isinstance(type, Choice): + prompt += f" ({', '.join(map(str, type.choices))})" + if default is not None and show_default: + prompt = f"{prompt} [{_format_default(default)}]" + return f"{prompt}{suffix}" + + +def _format_default(default: t.Any) -> t.Any: + if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): + return default.name # type: ignore + + return default + + +def prompt( + text: str, + default: t.Optional[t.Any] = None, + hide_input: bool = False, + confirmation_prompt: t.Union[bool, str] = False, + type: t.Optional[t.Union[ParamType, t.Any]] = None, + value_proc: t.Optional[t.Callable[[str], t.Any]] = None, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, + show_choices: bool = True, +) -> t.Any: + """Prompts a user for input. This is a convenience function that can + be used to prompt a user for input later. + + If the user aborts the input by sending a interrupt signal, this + function will catch it and raise a :exc:`Abort` exception. 
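+
+    A minimal usage sketch (illustrative)::
+
+        port = prompt("Port", type=int, default=8000)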
+ + :param text: the text to show for the prompt. + :param default: the default value to use if no input happens. If this + is not given it will prompt until it's aborted. + :param hide_input: if this is set to true then the input value will + be hidden. + :param confirmation_prompt: Prompt a second time to confirm the + value. Can be set to a string instead of ``True`` to customize + the message. + :param type: the type to use to check the value against. + :param value_proc: if this parameter is provided it's a function that + is invoked instead of the type conversion to + convert a value. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + :param show_choices: Show or hide choices if the passed type is a Choice. + For example if type is a Choice of either day or week, + show_choices is true and text is "Group by" then the + prompt will be "Group by (day, week): ". + + .. versionadded:: 8.0 + ``confirmation_prompt`` can be a custom string. + + .. versionadded:: 7.0 + Added the ``show_choices`` parameter. + + .. versionadded:: 6.0 + Added unicode support for cmd.exe on Windows. + + .. versionadded:: 4.0 + Added the `err` parameter. + + """ + + def prompt_func(text: str) -> str: + f = hidden_prompt_func if hide_input else visible_prompt_func + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(text.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. + return f(" ") + except (KeyboardInterrupt, EOFError): + # getpass doesn't print a newline if the user aborts input with ^C. + # Allegedly this behavior is inherited from getpass(3). + # A doc bug has been filed at https://bugs.python.org/issue24711 + if hide_input: + echo(None, err=err) + raise Abort() from None + + if value_proc is None: + value_proc = convert_type(type, default) + + prompt = _build_prompt( + text, prompt_suffix, show_default, default, show_choices, type + ) + + if confirmation_prompt: + if confirmation_prompt is True: + confirmation_prompt = _("Repeat for confirmation") + + confirmation_prompt = t.cast(str, confirmation_prompt) + confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) + + while True: + while True: + value = prompt_func(prompt) + if value: + break + elif default is not None: + value = default + break + try: + result = value_proc(value) + except UsageError as e: + if hide_input: + echo(_("Error: The value you entered was invalid."), err=err) + else: + echo(_("Error: {e.message}").format(e=e), err=err) # noqa: B306 + continue + if not confirmation_prompt: + return result + while True: + confirmation_prompt = t.cast(str, confirmation_prompt) + value2 = prompt_func(confirmation_prompt) + if value2: + break + if value == value2: + return result + echo(_("Error: The two entered values do not match."), err=err) + + +def confirm( + text: str, + default: t.Optional[bool] = False, + abort: bool = False, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, +) -> bool: + """Prompts for confirmation (yes/no question). + + If the user aborts the input by sending a interrupt signal this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the question to ask. 
+ :param default: The default value to use when no input is given. If + ``None``, repeat until input is given. + :param abort: if this is set to `True` a negative answer aborts the + exception by raising :exc:`Abort`. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + + .. versionchanged:: 8.0 + Repeat until input is given if ``default`` is ``None``. + + .. versionadded:: 4.0 + Added the ``err`` parameter. + """ + prompt = _build_prompt( + text, + prompt_suffix, + show_default, + "y/n" if default is None else ("Y/n" if default else "y/N"), + ) + + while True: + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(prompt.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. + value = visible_prompt_func(" ").lower().strip() + except (KeyboardInterrupt, EOFError): + raise Abort() from None + if value in ("y", "yes"): + rv = True + elif value in ("n", "no"): + rv = False + elif default is not None and value == "": + rv = default + else: + echo(_("Error: invalid input"), err=err) + continue + break + if abort and not rv: + raise Abort() + return rv + + +def get_terminal_size() -> os.terminal_size: + """Returns the current size of the terminal as tuple in the form + ``(width, height)`` in columns and rows. + + .. deprecated:: 8.0 + Will be removed in Click 8.1. Use + :func:`shutil.get_terminal_size` instead. + """ + import shutil + import warnings + + warnings.warn( + "'click.get_terminal_size()' is deprecated and will be removed" + " in Click 8.1. Use 'shutil.get_terminal_size()' instead.", + DeprecationWarning, + stacklevel=2, + ) + return shutil.get_terminal_size() + + +def echo_via_pager( + text_or_generator: t.Union[t.Iterable[str], t.Callable[[], t.Iterable[str]], str], + color: t.Optional[bool] = None, +) -> None: + """This function takes a text and shows it via an environment specific + pager on stdout. + + .. versionchanged:: 3.0 + Added the `color` flag. + + :param text_or_generator: the text to page, or alternatively, a + generator emitting the text to page. + :param color: controls if the pager supports ANSI colors or not. The + default is autodetection. 
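+
+    A minimal usage sketch (illustrative)::
+
+        echo_via_pager(f"line {n}\n" for n in range(200))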
+ """ + color = resolve_color_default(color) + + if inspect.isgeneratorfunction(text_or_generator): + i = t.cast(t.Callable[[], t.Iterable[str]], text_or_generator)() + elif isinstance(text_or_generator, str): + i = [text_or_generator] + else: + i = iter(t.cast(t.Iterable[str], text_or_generator)) + + # convert every element of i to a text type if necessary + text_generator = (el if isinstance(el, str) else str(el) for el in i) + + from ._termui_impl import pager + + return pager(itertools.chain(text_generator, "\n"), color) + + +def progressbar( + iterable: t.Optional[t.Iterable[V]] = None, + length: t.Optional[int] = None, + label: t.Optional[str] = None, + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", + width: int = 36, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, +) -> "ProgressBar[V]": + """This function creates an iterable context manager that can be used + to iterate over something while showing a progress bar. It will + either iterate over the `iterable` or `length` items (that are counted + up). While iteration happens, this function will print a rendered + progress bar to the given `file` (defaults to stdout) and will attempt + to calculate remaining time and more. By default, this progress bar + will not be rendered if the file is not a terminal. + + The context manager creates the progress bar. When the context + manager is entered the progress bar is already created. With every + iteration over the progress bar, the iterable passed to the bar is + advanced and the bar is updated. When the context manager exits, + a newline is printed and the progress bar is finalized on screen. + + Note: The progress bar is currently designed for use cases where the + total progress can be expected to take at least several seconds. + Because of this, the ProgressBar class object won't display + progress that is considered too fast, and progress where the time + between steps is less than a second. + + No printing must happen or the progress bar will be unintentionally + destroyed. + + Example usage:: + + with progressbar(items) as bar: + for item in bar: + do_something_with(item) + + Alternatively, if no iterable is specified, one can manually update the + progress bar through the `update()` method instead of directly + iterating over the progress bar. The update method accepts the number + of steps to increment the bar with:: + + with progressbar(length=chunks.total_bytes) as bar: + for chunk in chunks: + process_chunk(chunk) + bar.update(chunks.bytes) + + The ``update()`` method also takes an optional value specifying the + ``current_item`` at the new position. This is useful when used + together with ``item_show_func`` to customize the output for each + manual step:: + + with click.progressbar( + length=total_size, + label='Unzipping archive', + item_show_func=lambda a: a.filename + ) as bar: + for archive in zip_file: + archive.extract() + bar.update(archive.size, archive) + + :param iterable: an iterable to iterate over. If not provided the length + is required. + :param length: the number of items to iterate over. By default the + progressbar will attempt to ask the iterator about its + length, which might or might not work. 
If an iterable is + also provided this parameter can be used to override the + length. If an iterable is not provided the progress bar + will iterate over a range of that length. + :param label: the label to show next to the progress bar. + :param show_eta: enables or disables the estimated time display. This is + automatically disabled if the length cannot be + determined. + :param show_percent: enables or disables the percentage display. The + default is `True` if the iterable has a length or + `False` if not. + :param show_pos: enables or disables the absolute position display. The + default is `False`. + :param item_show_func: A function called with the current item which + can return a string to show next to the progress bar. If the + function returns ``None`` nothing is shown. The current item can + be ``None``, such as when entering and exiting the bar. + :param fill_char: the character to use to show the filled part of the + progress bar. + :param empty_char: the character to use to show the non-filled part of + the progress bar. + :param bar_template: the format string to use as template for the bar. + The parameters in it are ``label`` for the label, + ``bar`` for the progress bar and ``info`` for the + info section. + :param info_sep: the separator between multiple info items (eta etc.) + :param width: the width of the progress bar in characters, 0 means full + terminal width + :param file: The file to write to. If this is not a terminal then + only the label is printed. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are included anywhere in the progress bar output + which is not the case by default. + :param update_min_steps: Render only when this many updates have + completed. This allows tuning for very fast iterators. + + .. versionchanged:: 8.0 + Output is shown even if execution time is less than 0.5 seconds. + + .. versionchanged:: 8.0 + ``item_show_func`` shows the current item, not the previous one. + + .. versionchanged:: 8.0 + Labels are echoed if the output is not a TTY. Reverts a change + in 7.0 that removed all output. + + .. versionadded:: 8.0 + Added the ``update_min_steps`` parameter. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. Added the ``update`` method to + the object. + + .. versionadded:: 2.0 + """ + from ._termui_impl import ProgressBar + + color = resolve_color_default(color) + return ProgressBar( + iterable=iterable, + length=length, + show_eta=show_eta, + show_percent=show_percent, + show_pos=show_pos, + item_show_func=item_show_func, + fill_char=fill_char, + empty_char=empty_char, + bar_template=bar_template, + info_sep=info_sep, + file=file, + label=label, + width=width, + color=color, + update_min_steps=update_min_steps, + ) + + +def clear() -> None: + """Clears the terminal screen. This will have the effect of clearing + the whole visible space of the terminal and moving the cursor to the + top left. This does not do anything if not connected to a terminal. + + .. 
versionadded:: 2.0 + """ + if not isatty(sys.stdout): + return + if WIN: + os.system("cls") + else: + sys.stdout.write("\033[2J\033[1;1H") + + +def _interpret_color( + color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0 +) -> str: + if isinstance(color, int): + return f"{38 + offset};5;{color:d}" + + if isinstance(color, (tuple, list)): + r, g, b = color + return f"{38 + offset};2;{r:d};{g:d};{b:d}" + + return str(_ansi_colors[color] + offset) + + +def style( + text: t.Any, + fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bold: t.Optional[bool] = None, + dim: t.Optional[bool] = None, + underline: t.Optional[bool] = None, + overline: t.Optional[bool] = None, + italic: t.Optional[bool] = None, + blink: t.Optional[bool] = None, + reverse: t.Optional[bool] = None, + strikethrough: t.Optional[bool] = None, + reset: bool = True, +) -> str: + """Styles a text with ANSI styles and returns the new string. By + default the styling is self contained which means that at the end + of the string a reset code is issued. This can be prevented by + passing ``reset=False``. + + Examples:: + + click.echo(click.style('Hello World!', fg='green')) + click.echo(click.style('ATTENTION!', blink=True)) + click.echo(click.style('Some things', reverse=True, fg='cyan')) + click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) + + Supported color names: + + * ``black`` (might be a gray) + * ``red`` + * ``green`` + * ``yellow`` (might be an orange) + * ``blue`` + * ``magenta`` + * ``cyan`` + * ``white`` (might be light gray) + * ``bright_black`` + * ``bright_red`` + * ``bright_green`` + * ``bright_yellow`` + * ``bright_blue`` + * ``bright_magenta`` + * ``bright_cyan`` + * ``bright_white`` + * ``reset`` (reset the color code only) + + If the terminal supports it, color may also be specified as: + + - An integer in the interval [0, 255]. The terminal must support + 8-bit/256-color mode. + - An RGB tuple of three integers in [0, 255]. The terminal must + support 24-bit/true-color mode. + + See https://en.wikipedia.org/wiki/ANSI_color and + https://gist.github.com/XVilka/8346728 for more information. + + :param text: the string to style with ansi codes. + :param fg: if provided this will become the foreground color. + :param bg: if provided this will become the background color. + :param bold: if provided this will enable or disable bold mode. + :param dim: if provided this will enable or disable dim mode. This is + badly supported. + :param underline: if provided this will enable or disable underline. + :param overline: if provided this will enable or disable overline. + :param italic: if provided this will enable or disable italic. + :param blink: if provided this will enable or disable blinking. + :param reverse: if provided this will enable or disable inverse + rendering (foreground becomes background and the + other way round). + :param strikethrough: if provided this will enable or disable + striking through text. + :param reset: by default a reset-all code is added at the end of the + string which means that styles do not carry over. This + can be disabled to compose styles. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. + + .. versionchanged:: 8.0 + Added support for 256 and RGB color codes. + + .. versionchanged:: 8.0 + Added the ``strikethrough``, ``italic``, and ``overline`` + parameters. + + .. versionchanged:: 7.0 + Added support for bright colors. + + .. 
versionadded:: 2.0 + """ + if not isinstance(text, str): + text = str(text) + + bits = [] + + if fg: + try: + bits.append(f"\033[{_interpret_color(fg)}m") + except KeyError: + raise TypeError(f"Unknown color {fg!r}") from None + + if bg: + try: + bits.append(f"\033[{_interpret_color(bg, 10)}m") + except KeyError: + raise TypeError(f"Unknown color {bg!r}") from None + + if bold is not None: + bits.append(f"\033[{1 if bold else 22}m") + if dim is not None: + bits.append(f"\033[{2 if dim else 22}m") + if underline is not None: + bits.append(f"\033[{4 if underline else 24}m") + if overline is not None: + bits.append(f"\033[{53 if overline else 55}m") + if italic is not None: + bits.append(f"\033[{3 if italic else 23}m") + if blink is not None: + bits.append(f"\033[{5 if blink else 25}m") + if reverse is not None: + bits.append(f"\033[{7 if reverse else 27}m") + if strikethrough is not None: + bits.append(f"\033[{9 if strikethrough else 29}m") + bits.append(text) + if reset: + bits.append(_ansi_reset_all) + return "".join(bits) + + +def unstyle(text: str) -> str: + """Removes ANSI styling information from a string. Usually it's not + necessary to use this function as Click's echo function will + automatically remove styling if necessary. + + .. versionadded:: 2.0 + + :param text: the text to remove style information from. + """ + return strip_ansi(text) + + +def secho( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, + **styles: t.Any, +) -> None: + """This function combines :func:`echo` and :func:`style` into one + call. As such the following two calls are the same:: + + click.secho('Hello World!', fg='green') + click.echo(click.style('Hello World!', fg='green')) + + All keyword arguments are forwarded to the underlying functions + depending on which one they go with. + + Non-string types will be converted to :class:`str`. However, + :class:`bytes` are passed directly to :meth:`echo` without applying + style. If you want to style bytes that represent text, call + :meth:`bytes.decode` first. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. Bytes are + passed through without style applied. + + .. versionadded:: 2.0 + """ + if message is not None and not isinstance(message, (bytes, bytearray)): + message = style(message, **styles) + + return echo(message, file=file, nl=nl, err=err, color=color) + + +def edit( + text: t.Optional[t.AnyStr] = None, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + filename: t.Optional[str] = None, +) -> t.Optional[t.AnyStr]: + r"""Edits the given text in the defined editor. If an editor is given + (should be the full path to the executable but the regular operating + system search path is used for finding the executable) it overrides + the detected editor. Optionally, some environment variables can be + used. If the editor is closed without changes, `None` is returned. In + case a file is edited directly the return value is always `None` and + `require_save` and `extension` are ignored. + + If the editor cannot be opened a :exc:`UsageError` is raised. + + Note for Windows: to simplify cross-platform usage, the newlines are + automatically converted from POSIX to Windows and vice versa. As such, + the message here will have ``\n`` as newline markers. + + :param text: the text to edit. + :param editor: optionally the editor to use. 
Defaults to automatic + detection. + :param env: environment variables to forward to the editor. + :param require_save: if this is true, then not saving in the editor + will make the return value become `None`. + :param extension: the extension to tell the editor about. This defaults + to `.txt` but changing this might change syntax + highlighting. + :param filename: if provided it will edit this file instead of the + provided text contents. It will not use a temporary + file as an indirection in that case. + """ + from ._termui_impl import Editor + + ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) + + if filename is None: + return ed.edit(text) + + ed.edit_file(filename) + return None + + +def launch(url: str, wait: bool = False, locate: bool = False) -> int: + """This function launches the given URL (or filename) in the default + viewer application for this file type. If this is an executable, it + might launch the executable in a new session. The return value is + the exit code of the launched application. Usually, ``0`` indicates + success. + + Examples:: + + click.launch('https://click.palletsprojects.com/') + click.launch('/my/downloaded/file', locate=True) + + .. versionadded:: 2.0 + + :param url: URL or filename of the thing to launch. + :param wait: Wait for the program to exit before returning. This + only works if the launched program blocks. In particular, + ``xdg-open`` on Linux does not block. + :param locate: if this is set to `True` then instead of launching the + application associated with the URL it will attempt to + launch a file manager with the file located. This + might have weird effects if the URL does not point to + the filesystem. + """ + from ._termui_impl import open_url + + return open_url(url, wait=wait, locate=locate) + + +# If this is provided, getchar() calls into this instead. This is used +# for unittesting purposes. +_getchar: t.Optional[t.Callable[[bool], str]] = None + + +def getchar(echo: bool = False) -> str: + """Fetches a single character from the terminal and returns it. This + will always return a unicode character and under certain rare + circumstances this might return more than one character. The + situations which more than one character is returned is when for + whatever reason multiple characters end up in the terminal buffer or + standard input was not actually a terminal. + + Note that this will always read from the terminal, even if something + is piped into the standard input. + + Note for Windows: in rare cases when typing non-ASCII characters, this + function might wait for a second character and then return both at once. + This is because certain Unicode characters look like special-key markers. + + .. versionadded:: 2.0 + + :param echo: if set to `True`, the character read will also show up on + the terminal. The default is to not show it. + """ + global _getchar + + if _getchar is None: + from ._termui_impl import getchar as f + + _getchar = f + + return _getchar(echo) + + +def raw_terminal() -> t.ContextManager[int]: + from ._termui_impl import raw_terminal as f + + return f() + + +def pause(info: t.Optional[str] = None, err: bool = False) -> None: + """This command stops execution and waits for the user to press any + key to continue. This is similar to the Windows batch "pause" + command. If the program is not run through a terminal, this command + will instead do nothing. + + .. versionadded:: 2.0 + + .. versionadded:: 4.0 + Added the `err` parameter. 
+ + :param info: The message to print before pausing. Defaults to + ``"Press any key to continue..."``. + :param err: if set to message goes to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + if not isatty(sys.stdin) or not isatty(sys.stdout): + return + + if info is None: + info = _("Press any key to continue...") + + try: + if info: + echo(info, nl=False, err=err) + try: + getchar() + except (KeyboardInterrupt, EOFError): + pass + finally: + if info: + echo(err=err) diff --git a/venv/lib/python3.8/site-packages/click/testing.py b/venv/lib/python3.8/site-packages/click/testing.py new file mode 100644 index 0000000..d19b850 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/testing.py @@ -0,0 +1,479 @@ +import contextlib +import io +import os +import shlex +import shutil +import sys +import tempfile +import typing as t +from types import TracebackType + +from . import formatting +from . import termui +from . import utils +from ._compat import _find_binary_reader + +if t.TYPE_CHECKING: + from .core import BaseCommand + + +class EchoingStdin: + def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: + self._input = input + self._output = output + self._paused = False + + def __getattr__(self, x: str) -> t.Any: + return getattr(self._input, x) + + def _echo(self, rv: bytes) -> bytes: + if not self._paused: + self._output.write(rv) + + return rv + + def read(self, n: int = -1) -> bytes: + return self._echo(self._input.read(n)) + + def read1(self, n: int = -1) -> bytes: + return self._echo(self._input.read1(n)) # type: ignore + + def readline(self, n: int = -1) -> bytes: + return self._echo(self._input.readline(n)) + + def readlines(self) -> t.List[bytes]: + return [self._echo(x) for x in self._input.readlines()] + + def __iter__(self) -> t.Iterator[bytes]: + return iter(self._echo(x) for x in self._input) + + def __repr__(self) -> str: + return repr(self._input) + + +@contextlib.contextmanager +def _pause_echo(stream: t.Optional[EchoingStdin]) -> t.Iterator[None]: + if stream is None: + yield + else: + stream._paused = True + yield + stream._paused = False + + +class _NamedTextIOWrapper(io.TextIOWrapper): + def __init__( + self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any + ) -> None: + super().__init__(buffer, **kwargs) + self._name = name + self._mode = mode + + @property + def name(self) -> str: + return self._name + + @property + def mode(self) -> str: + return self._mode + + +def make_input_stream( + input: t.Optional[t.Union[str, bytes, t.IO]], charset: str +) -> t.BinaryIO: + # Is already an input stream. + if hasattr(input, "read"): + rv = _find_binary_reader(t.cast(t.IO, input)) + + if rv is not None: + return rv + + raise TypeError("Could not find binary reader for input stream.") + + if input is None: + input = b"" + elif isinstance(input, str): + input = input.encode(charset) + + return io.BytesIO(t.cast(bytes, input)) + + +class Result: + """Holds the captured result of an invoked CLI script.""" + + def __init__( + self, + runner: "CliRunner", + stdout_bytes: bytes, + stderr_bytes: t.Optional[bytes], + return_value: t.Any, + exit_code: int, + exception: t.Optional[BaseException], + exc_info: t.Optional[ + t.Tuple[t.Type[BaseException], BaseException, TracebackType] + ] = None, + ): + #: The runner that created the result + self.runner = runner + #: The standard output as bytes. 
+ self.stdout_bytes = stdout_bytes + #: The standard error as bytes, or None if not available + self.stderr_bytes = stderr_bytes + #: The value returned from the invoked command. + #: + #: .. versionadded:: 8.0 + self.return_value = return_value + #: The exit code as integer. + self.exit_code = exit_code + #: The exception that happened if one did. + self.exception = exception + #: The traceback + self.exc_info = exc_info + + @property + def output(self) -> str: + """The (standard) output as unicode string.""" + return self.stdout + + @property + def stdout(self) -> str: + """The standard output as unicode string.""" + return self.stdout_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + @property + def stderr(self) -> str: + """The standard error as unicode string.""" + if self.stderr_bytes is None: + raise ValueError("stderr not separately captured") + return self.stderr_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + def __repr__(self) -> str: + exc_str = repr(self.exception) if self.exception else "okay" + return f"<{type(self).__name__} {exc_str}>" + + +class CliRunner: + """The CLI runner provides functionality to invoke a Click command line + script for unittesting purposes in a isolated environment. This only + works in single-threaded systems without any concurrency as it changes the + global interpreter state. + + :param charset: the character set for the input and output data. + :param env: a dictionary with environment variables for overriding. + :param echo_stdin: if this is set to `True`, then reading from stdin writes + to stdout. This is useful for showing examples in + some circumstances. Note that regular prompts + will automatically echo the input. + :param mix_stderr: if this is set to `False`, then stdout and stderr are + preserved as independent streams. This is useful for + Unix-philosophy apps that have predictable stdout and + noisy stderr, such that each may be measured + independently + """ + + def __init__( + self, + charset: str = "utf-8", + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + echo_stdin: bool = False, + mix_stderr: bool = True, + ) -> None: + self.charset = charset + self.env = env or {} + self.echo_stdin = echo_stdin + self.mix_stderr = mix_stderr + + def get_default_prog_name(self, cli: "BaseCommand") -> str: + """Given a command object it will return the default program name + for it. The default is the `name` attribute or ``"root"`` if not + set. + """ + return cli.name or "root" + + def make_env( + self, overrides: t.Optional[t.Mapping[str, t.Optional[str]]] = None + ) -> t.Mapping[str, t.Optional[str]]: + """Returns the environment overrides for invoking a script.""" + rv = dict(self.env) + if overrides: + rv.update(overrides) + return rv + + @contextlib.contextmanager + def isolation( + self, + input: t.Optional[t.Union[str, bytes, t.IO]] = None, + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + color: bool = False, + ) -> t.Iterator[t.Tuple[io.BytesIO, t.Optional[io.BytesIO]]]: + """A context manager that sets up the isolation for invoking of a + command line tool. This sets up stdin with the given input data + and `os.environ` with the overrides from the given dictionary. + This also rebinds some internals in Click to be mocked (like the + prompt functionality). + + This is automatically done in the :meth:`invoke` method. + + :param input: the input stream to put into sys.stdin. + :param env: the environment overrides as dictionary. 
+ :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + ``stderr`` is opened with ``errors="backslashreplace"`` + instead of the default ``"strict"``. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + """ + bytes_input = make_input_stream(input, self.charset) + echo_input = None + + old_stdin = sys.stdin + old_stdout = sys.stdout + old_stderr = sys.stderr + old_forced_width = formatting.FORCED_WIDTH + formatting.FORCED_WIDTH = 80 + + env = self.make_env(env) + + bytes_output = io.BytesIO() + + if self.echo_stdin: + bytes_input = echo_input = t.cast( + t.BinaryIO, EchoingStdin(bytes_input, bytes_output) + ) + + sys.stdin = text_input = _NamedTextIOWrapper( + bytes_input, encoding=self.charset, name="", mode="r" + ) + + if self.echo_stdin: + # Force unbuffered reads, otherwise TextIOWrapper reads a + # large chunk which is echoed early. + text_input._CHUNK_SIZE = 1 # type: ignore + + sys.stdout = _NamedTextIOWrapper( + bytes_output, encoding=self.charset, name="", mode="w" + ) + + bytes_error = None + if self.mix_stderr: + sys.stderr = sys.stdout + else: + bytes_error = io.BytesIO() + sys.stderr = _NamedTextIOWrapper( + bytes_error, + encoding=self.charset, + name="", + mode="w", + errors="backslashreplace", + ) + + @_pause_echo(echo_input) # type: ignore + def visible_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(prompt or "") + val = text_input.readline().rstrip("\r\n") + sys.stdout.write(f"{val}\n") + sys.stdout.flush() + return val + + @_pause_echo(echo_input) # type: ignore + def hidden_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(f"{prompt or ''}\n") + sys.stdout.flush() + return text_input.readline().rstrip("\r\n") + + @_pause_echo(echo_input) # type: ignore + def _getchar(echo: bool) -> str: + char = sys.stdin.read(1) + + if echo: + sys.stdout.write(char) + + sys.stdout.flush() + return char + + default_color = color + + def should_strip_ansi( + stream: t.Optional[t.IO] = None, color: t.Optional[bool] = None + ) -> bool: + if color is None: + return not default_color + return not color + + old_visible_prompt_func = termui.visible_prompt_func + old_hidden_prompt_func = termui.hidden_prompt_func + old__getchar_func = termui._getchar + old_should_strip_ansi = utils.should_strip_ansi # type: ignore + termui.visible_prompt_func = visible_input + termui.hidden_prompt_func = hidden_input + termui._getchar = _getchar + utils.should_strip_ansi = should_strip_ansi # type: ignore + + old_env = {} + try: + for key, value in env.items(): + old_env[key] = os.environ.get(key) + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + yield (bytes_output, bytes_error) + finally: + for key, value in old_env.items(): + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + sys.stdout = old_stdout + sys.stderr = old_stderr + sys.stdin = old_stdin + termui.visible_prompt_func = old_visible_prompt_func + termui.hidden_prompt_func = old_hidden_prompt_func + termui._getchar = old__getchar_func + utils.should_strip_ansi = old_should_strip_ansi # type: ignore + formatting.FORCED_WIDTH = old_forced_width + + def invoke( + self, + cli: "BaseCommand", + args: t.Optional[t.Union[str, t.Sequence[str]]] = None, + input: t.Optional[t.Union[str, bytes, t.IO]] = None, + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + catch_exceptions: bool = True, + color: 
bool = False, + **extra: t.Any, + ) -> Result: + """Invokes a command in an isolated environment. The arguments are + forwarded directly to the command line script, the `extra` keyword + arguments are passed to the :meth:`~clickpkg.Command.main` function of + the command. + + This returns a :class:`Result` object. + + :param cli: the command to invoke + :param args: the arguments to invoke. It may be given as an iterable + or a string. When given as string it will be interpreted + as a Unix shell command. More details at + :func:`shlex.split`. + :param input: the input data for `sys.stdin`. + :param env: the environment overrides. + :param catch_exceptions: Whether to catch any other exceptions than + ``SystemExit``. + :param extra: the keyword arguments to pass to :meth:`main`. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + The result object has the ``return_value`` attribute with + the value returned from the invoked command. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionchanged:: 3.0 + Added the ``catch_exceptions`` parameter. + + .. versionchanged:: 3.0 + The result object has the ``exc_info`` attribute with the + traceback if available. + """ + exc_info = None + with self.isolation(input=input, env=env, color=color) as outstreams: + return_value = None + exception: t.Optional[BaseException] = None + exit_code = 0 + + if isinstance(args, str): + args = shlex.split(args) + + try: + prog_name = extra.pop("prog_name") + except KeyError: + prog_name = self.get_default_prog_name(cli) + + try: + return_value = cli.main(args=args or (), prog_name=prog_name, **extra) + except SystemExit as e: + exc_info = sys.exc_info() + e_code = t.cast(t.Optional[t.Union[int, t.Any]], e.code) + + if e_code is None: + e_code = 0 + + if e_code != 0: + exception = e + + if not isinstance(e_code, int): + sys.stdout.write(str(e_code)) + sys.stdout.write("\n") + e_code = 1 + + exit_code = e_code + + except Exception as e: + if not catch_exceptions: + raise + exception = e + exit_code = 1 + exc_info = sys.exc_info() + finally: + sys.stdout.flush() + stdout = outstreams[0].getvalue() + if self.mix_stderr: + stderr = None + else: + stderr = outstreams[1].getvalue() # type: ignore + + return Result( + runner=self, + stdout_bytes=stdout, + stderr_bytes=stderr, + return_value=return_value, + exit_code=exit_code, + exception=exception, + exc_info=exc_info, # type: ignore + ) + + @contextlib.contextmanager + def isolated_filesystem( + self, temp_dir: t.Optional[t.Union[str, os.PathLike]] = None + ) -> t.Iterator[str]: + """A context manager that creates a temporary directory and + changes the current working directory to it. This isolates tests + that affect the contents of the CWD to prevent them from + interfering with each other. + + :param temp_dir: Create the temporary directory under this + directory. If given, the created directory is not removed + when exiting. + + .. versionchanged:: 8.0 + Added the ``temp_dir`` parameter. 
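To illustrate how invoke() and isolated_filesystem() fit together in a test, here is a small sketch; the `touch` command and the `notes.txt` filename are made up for the example.

import click
from click.testing import CliRunner

@click.command()
@click.argument("path")
def touch(path):
    """Create an empty file at PATH."""
    open(path, "w").close()
    click.echo(f"created {path}")

runner = CliRunner()
with runner.isolated_filesystem():     # fresh temporary working directory, removed on exit
    result = runner.invoke(touch, ["notes.txt"])
    assert result.exit_code == 0
    assert "created notes.txt" in result.output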
+ """ + cwd = os.getcwd() + t = tempfile.mkdtemp(dir=temp_dir) + os.chdir(t) + + try: + yield t + finally: + os.chdir(cwd) + + if temp_dir is None: + try: + shutil.rmtree(t) + except OSError: # noqa: B014 + pass diff --git a/venv/lib/python3.8/site-packages/click/types.py b/venv/lib/python3.8/site-packages/click/types.py new file mode 100644 index 0000000..103d218 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/types.py @@ -0,0 +1,1052 @@ +import os +import stat +import typing as t +from datetime import datetime +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import _get_argv_encoding +from ._compat import get_filesystem_encoding +from ._compat import open_stream +from .exceptions import BadParameter +from .utils import LazyFile +from .utils import safecall + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Context + from .core import Parameter + from .shell_completion import CompletionItem + + +class ParamType: + """Represents the type of a parameter. Validates and converts values + from the command line or Python into the correct type. + + To implement a custom type, subclass and implement at least the + following: + + - The :attr:`name` class attribute must be set. + - Calling an instance of the type with ``None`` must return + ``None``. This is already implemented by default. + - :meth:`convert` must convert string values to the correct type. + - :meth:`convert` must accept values that are already the correct + type. + - It must be able to convert a value if the ``ctx`` and ``param`` + arguments are ``None``. This can occur when converting prompt + input. + """ + + is_composite: t.ClassVar[bool] = False + arity: t.ClassVar[int] = 1 + + #: the descriptive name of this type + name: str + + #: if a list of this type is expected and the value is pulled from a + #: string environment variable, this is what splits it up. `None` + #: means any whitespace. For all parameters the general rule is that + #: whitespace splits them up. The exception are paths and files which + #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on + #: Windows). + envvar_list_splitter: t.ClassVar[t.Optional[str]] = None + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + # The class name without the "ParamType" suffix. + param_type = type(self).__name__.partition("ParamType")[0] + param_type = param_type.partition("ParameterType")[0] + return {"param_type": param_type, "name": self.name} + + def __call__( + self, + value: t.Any, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> t.Any: + if value is not None: + return self.convert(value, param, ctx) + + def get_metavar(self, param: "Parameter") -> t.Optional[str]: + """Returns the metavar default for this param if it provides one.""" + + def get_missing_message(self, param: "Parameter") -> t.Optional[str]: + """Optionally might return extra information about a missing + parameter. + + .. versionadded:: 2.0 + """ + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + """Convert the value to the correct type. This is not called if + the value is ``None`` (the missing value). + + This must accept string values from the command line, as well as + values that are already the correct type. 
It may also convert + other compatible types. + + The ``param`` and ``ctx`` arguments may be ``None`` in certain + situations, such as when converting prompt input. + + If the value cannot be converted, call :meth:`fail` with a + descriptive message. + + :param value: The value to convert. + :param param: The parameter that is using this type to convert + its value. May be ``None``. + :param ctx: The current context that arrived at this value. May + be ``None``. + """ + return value + + def split_envvar_value(self, rv: str) -> t.Sequence[str]: + """Given a value from an environment variable this splits it up + into small chunks depending on the defined envvar list splitter. + + If the splitter is set to `None`, which means that whitespace splits, + then leading and trailing whitespace is ignored. Otherwise, leading + and trailing splitters usually lead to empty items being included. + """ + return (rv or "").split(self.envvar_list_splitter) + + def fail( + self, + message: str, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> "t.NoReturn": + """Helper method to fail with an invalid value message.""" + raise BadParameter(message, ctx=ctx, param=param) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a list of + :class:`~click.shell_completion.CompletionItem` objects for the + incomplete value. Most types do not provide completions, but + some do, and this allows custom types to provide custom + completions as well. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + return [] + + +class CompositeParamType(ParamType): + is_composite = True + + @property + def arity(self) -> int: # type: ignore + raise NotImplementedError() + + +class FuncParamType(ParamType): + def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: + self.name = func.__name__ + self.func = func + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["func"] = self.func + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self.func(value) + except ValueError: + try: + value = str(value) + except UnicodeError: + value = value.decode("utf-8", "replace") + + self.fail(value, param, ctx) + + +class UnprocessedParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + return value + + def __repr__(self) -> str: + return "UNPROCESSED" + + +class StringParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, bytes): + enc = _get_argv_encoding() + try: + value = value.decode(enc) + except UnicodeError: + fs_enc = get_filesystem_encoding() + if fs_enc != enc: + try: + value = value.decode(fs_enc) + except UnicodeError: + value = value.decode("utf-8", "replace") + else: + value = value.decode("utf-8", "replace") + return value + return str(value) + + def __repr__(self) -> str: + return "STRING" + + +class Choice(ParamType): + """The choice type allows a value to be checked against a fixed set + of supported values. All of these values have to be strings. + + You should only pass a list or tuple of choices. 
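A minimal sketch of the subclassing contract described above; `CommaSeparated` is a hypothetical custom type. Note that convert() accepts values that are already converted and reports bad input through fail().

import click

class CommaSeparated(click.ParamType):
    name = "items"

    def convert(self, value, param, ctx):
        if isinstance(value, list):     # already the correct type
            return value
        if not isinstance(value, str):
            self.fail(f"{value!r} is not a comma-separated string", param, ctx)
        return [part.strip() for part in value.split(",") if part.strip()]

@click.command()
@click.option("--tags", type=CommaSeparated(), default="")
def tag(tags):
    click.echo(repr(tags))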
Other iterables + (like generators) may lead to surprising results. + + The resulting value will always be one of the originally passed choices + regardless of ``case_sensitive`` or any ``ctx.token_normalize_func`` + being specified. + + See :ref:`choice-opts` for an example. + + :param case_sensitive: Set to false to make choices case + insensitive. Defaults to true. + """ + + name = "choice" + + def __init__(self, choices: t.Sequence[str], case_sensitive: bool = True) -> None: + self.choices = choices + self.case_sensitive = case_sensitive + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["choices"] = self.choices + info_dict["case_sensitive"] = self.case_sensitive + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + choices_str = "|".join(self.choices) + + # Use curly braces to indicate a required argument. + if param.required and param.param_type_name == "argument": + return f"{{{choices_str}}}" + + # Use square braces to indicate an option or optional argument. + return f"[{choices_str}]" + + def get_missing_message(self, param: "Parameter") -> str: + return _("Choose from:\n\t{choices}").format(choices=",\n\t".join(self.choices)) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + # Match through normalization and case sensitivity + # first do token_normalize_func, then lowercase + # preserve original `value` to produce an accurate message in + # `self.fail` + normed_value = value + normed_choices = {choice: choice for choice in self.choices} + + if ctx is not None and ctx.token_normalize_func is not None: + normed_value = ctx.token_normalize_func(value) + normed_choices = { + ctx.token_normalize_func(normed_choice): original + for normed_choice, original in normed_choices.items() + } + + if not self.case_sensitive: + normed_value = normed_value.casefold() + normed_choices = { + normed_choice.casefold(): original + for normed_choice, original in normed_choices.items() + } + + if normed_value in normed_choices: + return normed_choices[normed_value] + + choices_str = ", ".join(map(repr, self.choices)) + self.fail( + ngettext( + "{value!r} is not {choice}.", + "{value!r} is not one of {choices}.", + len(self.choices), + ).format(value=value, choice=choices_str, choices=choices_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return f"Choice({list(self.choices)})" + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Complete choices that start with the incomplete value. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + str_choices = map(str, self.choices) + + if self.case_sensitive: + matched = (c for c in str_choices if c.startswith(incomplete)) + else: + incomplete = incomplete.lower() + matched = (c for c in str_choices if c.lower().startswith(incomplete)) + + return [CompletionItem(c) for c in matched] + + +class DateTime(ParamType): + """The DateTime type converts date strings into `datetime` objects. + + The format strings which are checked are configurable, but default to some + common (non-timezone aware) ISO 8601 formats. + + When specifying *DateTime* formats, you should only pass a list or a tuple. 
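A short sketch of the Choice type just defined, assuming a hypothetical `paint` command: with case_sensitive=False the converted value is still one of the originally declared strings.

import click

@click.command()
@click.option(
    "--color",
    type=click.Choice(["red", "green", "blue"], case_sensitive=False),
    default="red",
)
def paint(color):
    # "--color RED" is accepted and normalized back to "red".
    click.echo(f"painting in {color}")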
+ Other iterables, like generators, may lead to surprising results. + + The format strings are processed using ``datetime.strptime``, and this + consequently defines the format strings which are allowed. + + Parsing is tried using each format, in order, and the first format which + parses successfully is used. + + :param formats: A list or tuple of date format strings, in the order in + which they should be tried. Defaults to + ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, + ``'%Y-%m-%d %H:%M:%S'``. + """ + + name = "datetime" + + def __init__(self, formats: t.Optional[t.Sequence[str]] = None): + self.formats = formats or ["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["formats"] = self.formats + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + return f"[{'|'.join(self.formats)}]" + + def _try_to_convert_date(self, value: t.Any, format: str) -> t.Optional[datetime]: + try: + return datetime.strptime(value, format) + except ValueError: + return None + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, datetime): + return value + + for format in self.formats: + converted = self._try_to_convert_date(value, format) + + if converted is not None: + return converted + + formats_str = ", ".join(map(repr, self.formats)) + self.fail( + ngettext( + "{value!r} does not match the format {format}.", + "{value!r} does not match the formats {formats}.", + len(self.formats), + ).format(value=value, format=formats_str, formats=formats_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return "DateTime" + + +class _NumberParamTypeBase(ParamType): + _number_class: t.ClassVar[t.Type] + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self._number_class(value) + except ValueError: + self.fail( + _("{value!r} is not a valid {number_type}.").format( + value=value, number_type=self.name + ), + param, + ctx, + ) + + +class _NumberRangeBase(_NumberParamTypeBase): + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + self.min = min + self.max = max + self.min_open = min_open + self.max_open = max_open + self.clamp = clamp + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + min=self.min, + max=self.max, + min_open=self.min_open, + max_open=self.max_open, + clamp=self.clamp, + ) + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import operator + + rv = super().convert(value, param, ctx) + lt_min: bool = self.min is not None and ( + operator.le if self.min_open else operator.lt + )(rv, self.min) + gt_max: bool = self.max is not None and ( + operator.ge if self.max_open else operator.gt + )(rv, self.max) + + if self.clamp: + if lt_min: + return self._clamp(self.min, 1, self.min_open) # type: ignore + + if gt_max: + return self._clamp(self.max, -1, self.max_open) # type: ignore + + if lt_min or gt_max: + self.fail( + _("{value} is not in the range {range}.").format( + value=rv, range=self._describe_range() + ), + param, + ctx, + ) + + return rv + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + """Find the valid value to clamp to bound in 
the given + direction. + + :param bound: The boundary value. + :param dir: 1 or -1 indicating the direction to move. + :param open: If true, the range does not include the bound. + """ + raise NotImplementedError + + def _describe_range(self) -> str: + """Describe the range for use in help text.""" + if self.min is None: + op = "<" if self.max_open else "<=" + return f"x{op}{self.max}" + + if self.max is None: + op = ">" if self.min_open else ">=" + return f"x{op}{self.min}" + + lop = "<" if self.min_open else "<=" + rop = "<" if self.max_open else "<=" + return f"{self.min}{lop}x{rop}{self.max}" + + def __repr__(self) -> str: + clamp = " clamped" if self.clamp else "" + return f"<{type(self).__name__} {self._describe_range()}{clamp}>" + + +class IntParamType(_NumberParamTypeBase): + name = "integer" + _number_class = int + + def __repr__(self) -> str: + return "INT" + + +class IntRange(_NumberRangeBase, IntParamType): + """Restrict an :data:`click.INT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "integer range" + + def _clamp( # type: ignore + self, bound: int, dir: "te.Literal[1, -1]", open: bool + ) -> int: + if not open: + return bound + + return bound + dir + + +class FloatParamType(_NumberParamTypeBase): + name = "float" + _number_class = float + + def __repr__(self) -> str: + return "FLOAT" + + +class FloatRange(_NumberRangeBase, FloatParamType): + """Restrict a :data:`click.FLOAT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. This is not supported if either + boundary is marked ``open``. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "float range" + + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + super().__init__( + min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp + ) + + if (min_open or max_open) and clamp: + raise TypeError("Clamping is not supported for open bounds.") + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + if not open: + return bound + + # Could use Python 3.9's math.nextafter here, but clamping an + # open float range doesn't seem to be particularly useful. It's + # left up to the user to write a callback to do it if needed. 
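As a sketch of how the range types above are typically attached to options (the option names are illustrative): clamp snaps out-of-range values to the boundary, while an open bound excludes the boundary itself.

import click

@click.command()
@click.option("--workers", type=click.IntRange(1, 32), default=4)
@click.option("--retries", type=click.IntRange(0, 10, clamp=True), default=0)       # 99 becomes 10
@click.option("--ratio", type=click.FloatRange(0, 1, min_open=True), default=0.5)   # 0 is rejected
def run(workers, retries, ratio):
    click.echo(f"workers={workers} retries={retries} ratio={ratio}")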
+ raise RuntimeError("Clamping is not supported for open bounds.") + + +class BoolParamType(ParamType): + name = "boolean" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if value in {False, True}: + return bool(value) + + norm = value.strip().lower() + + if norm in {"1", "true", "t", "yes", "y", "on"}: + return True + + if norm in {"0", "false", "f", "no", "n", "off"}: + return False + + self.fail( + _("{value!r} is not a valid boolean.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "BOOL" + + +class UUIDParameterType(ParamType): + name = "uuid" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import uuid + + if isinstance(value, uuid.UUID): + return value + + value = value.strip() + + try: + return uuid.UUID(value) + except ValueError: + self.fail( + _("{value!r} is not a valid UUID.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "UUID" + + +class File(ParamType): + """Declares a parameter to be a file for reading or writing. The file + is automatically closed once the context tears down (after the command + finished working). + + Files can be opened for reading or writing. The special value ``-`` + indicates stdin or stdout depending on the mode. + + By default, the file is opened for reading text data, but it can also be + opened in binary mode or for writing. The encoding parameter can be used + to force a specific encoding. + + The `lazy` flag controls if the file should be opened immediately or upon + first IO. The default is to be non-lazy for standard input and output + streams as well as files opened for reading, `lazy` otherwise. When opening a + file lazily for reading, it is still opened temporarily for validation, but + will not be held open until first IO. lazy is mainly useful when opening + for writing to avoid creating the file until it is needed. + + Starting with Click 2.0, files can also be opened atomically in which + case all writes go into a separate file in the same folder and upon + completion the file will be moved over to the original location. This + is useful if a file regularly read by other users is modified. + + See :ref:`file-args` for more information. 
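A small sketch of the File type described above (the `copy` command is hypothetical): both arguments accept ``-`` for the standard streams, and the lazy destination is not created until the first write.

import click

@click.command()
@click.argument("src", type=click.File("r"))
@click.argument("dst", type=click.File("w", lazy=True))
def copy(src, dst):
    for line in src:
        dst.write(line)
    # Both files are closed automatically when the command's context tears down.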
+ """ + + name = "filename" + envvar_list_splitter = os.path.pathsep + + def __init__( + self, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: t.Optional[bool] = None, + atomic: bool = False, + ) -> None: + self.mode = mode + self.encoding = encoding + self.errors = errors + self.lazy = lazy + self.atomic = atomic + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update(mode=self.mode, encoding=self.encoding) + return info_dict + + def resolve_lazy_flag(self, value: t.Any) -> bool: + if self.lazy is not None: + return self.lazy + if value == "-": + return False + elif "w" in self.mode: + return True + return False + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + if hasattr(value, "read") or hasattr(value, "write"): + return value + + lazy = self.resolve_lazy_flag(value) + + if lazy: + f: t.IO = t.cast( + t.IO, + LazyFile( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ), + ) + + if ctx is not None: + ctx.call_on_close(f.close_intelligently) # type: ignore + + return f + + f, should_close = open_stream( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + # If a context is provided, we automatically close the file + # at the end of the context execution (or flush out). If a + # context does not exist, it's the caller's responsibility to + # properly close the file. This for instance happens when the + # type is used with prompts. + if ctx is not None: + if should_close: + ctx.call_on_close(safecall(f.close)) + else: + ctx.call_on_close(safecall(f.flush)) + + return f + except OSError as e: # noqa: B014 + self.fail(f"{os.fsdecode(value)!r}: {e.strerror}", param, ctx) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide file path completions. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + return [CompletionItem(incomplete, type="file")] + + +class Path(ParamType): + """The path type is similar to the :class:`File` type but it performs + different checks. First of all, instead of returning an open file + handle it returns just the filename. Secondly, it can perform various + basic checks about what the file or directory should be. + + :param exists: if set to true, the file or directory needs to exist for + this value to be valid. If this is not required and a + file does indeed not exist, then all further checks are + silently skipped. + :param file_okay: controls if a file is a possible value. + :param dir_okay: controls if a directory is a possible value. + :param writable: if true, a writable check is performed. + :param readable: if true, a readable check is performed. + :param resolve_path: if this is true, then the path is fully resolved + before the value is passed onwards. This means + that it's absolute and symlinks are resolved. It + will not expand a tilde-prefix, as this is + supposed to be done by the shell only. + :param allow_dash: If this is set to `True`, a single dash to indicate + standard streams is permitted. + :param path_type: Convert the incoming path value to this type. 
If + ``None``, keep Python's default, which is ``str``. Useful to + convert to :class:`pathlib.Path`. + + .. versionchanged:: 8.0 + Allow passing ``type=pathlib.Path``. + + .. versionchanged:: 6.0 + Added the ``allow_dash`` parameter. + """ + + envvar_list_splitter = os.path.pathsep + + def __init__( + self, + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: t.Optional[t.Type] = None, + ): + self.exists = exists + self.file_okay = file_okay + self.dir_okay = dir_okay + self.writable = writable + self.readable = readable + self.resolve_path = resolve_path + self.allow_dash = allow_dash + self.type = path_type + + if self.file_okay and not self.dir_okay: + self.name = _("file") + elif self.dir_okay and not self.file_okay: + self.name = _("directory") + else: + self.name = _("path") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + exists=self.exists, + file_okay=self.file_okay, + dir_okay=self.dir_okay, + writable=self.writable, + readable=self.readable, + allow_dash=self.allow_dash, + ) + return info_dict + + def coerce_path_result(self, rv: t.Any) -> t.Any: + if self.type is not None and not isinstance(rv, self.type): + if self.type is str: + rv = os.fsdecode(rv) + elif self.type is bytes: + rv = os.fsencode(rv) + else: + rv = self.type(rv) + + return rv + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + rv = value + + is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") + + if not is_dash: + if self.resolve_path: + # os.path.realpath doesn't resolve symlinks on Windows + # until Python 3.8. Use pathlib for now. + import pathlib + + rv = os.fsdecode(pathlib.Path(rv).resolve()) + + try: + st = os.stat(rv) + except OSError: + if not self.exists: + return self.coerce_path_result(rv) + self.fail( + _("{name} {filename!r} does not exist.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + + if not self.file_okay and stat.S_ISREG(st.st_mode): + self.fail( + _("{name} {filename!r} is a file.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + if not self.dir_okay and stat.S_ISDIR(st.st_mode): + self.fail( + _("{name} {filename!r} is a directory.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + if self.writable and not os.access(rv, os.W_OK): + self.fail( + _("{name} {filename!r} is not writable.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + if self.readable and not os.access(rv, os.R_OK): + self.fail( + _("{name} {filename!r} is not readable.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + + return self.coerce_path_result(rv) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide path completions for only + directories or any paths. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + type = "dir" if self.dir_okay and not self.file_okay else "file" + return [CompletionItem(incomplete, type=type)] + + +class Tuple(CompositeParamType): + """The default behavior of Click is to apply a type on a value directly. + This works well in most cases, except for when `nargs` is set to a fixed + count and different types should be used for different items. In this + case the :class:`Tuple` type can be used. This type can only be used + if `nargs` is set to a fixed number. + + For more information see :ref:`tuple-type`. + + This can be selected by using a Python tuple literal as a type. + + :param types: a list of types that should be used for the tuple items. + """ + + def __init__(self, types: t.Sequence[t.Union[t.Type, ParamType]]) -> None: + self.types = [convert_type(ty) for ty in types] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["types"] = [t.to_info_dict() for t in self.types] + return info_dict + + @property + def name(self) -> str: # type: ignore + return f"<{' '.join(ty.name for ty in self.types)}>" + + @property + def arity(self) -> int: # type: ignore + return len(self.types) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + len_type = len(self.types) + len_value = len(value) + + if len_value != len_type: + self.fail( + ngettext( + "{len_type} values are required, but {len_value} was given.", + "{len_type} values are required, but {len_value} were given.", + len_value, + ).format(len_type=len_type, len_value=len_value), + param=param, + ctx=ctx, + ) + + return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) + + +def convert_type(ty: t.Optional[t.Any], default: t.Optional[t.Any] = None) -> ParamType: + """Find the most appropriate :class:`ParamType` for the given Python + type. If the type isn't provided, it can be inferred from a default + value. + """ + guessed_type = False + + if ty is None and default is not None: + if isinstance(default, (tuple, list)): + # If the default is empty, ty will remain None and will + # return STRING. + if default: + item = default[0] + + # A tuple of tuples needs to detect the inner types. + # Can't call convert recursively because that would + # incorrectly unwind the tuple to a single type. + if isinstance(item, (tuple, list)): + ty = tuple(map(type, item)) + else: + ty = type(item) + else: + ty = type(default) + + guessed_type = True + + if isinstance(ty, tuple): + return Tuple(ty) + + if isinstance(ty, ParamType): + return ty + + if ty is str or ty is None: + return STRING + + if ty is int: + return INT + + if ty is float: + return FLOAT + + if ty is bool: + return BOOL + + if guessed_type: + return STRING + + if __debug__: + try: + if issubclass(ty, ParamType): + raise AssertionError( + f"Attempted to use an uninstantiated parameter type ({ty})." + ) + except TypeError: + # ty is an instance (correct), so issubclass fails. + pass + + return FuncParamType(ty) + + +#: A dummy parameter type that just does nothing. From a user's +#: perspective this appears to just be the same as `STRING` but +#: internally no string conversion takes place if the input was bytes. +#: This is usually useful when working with file paths as they can +#: appear in bytes and unicode. 
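A sketch of the Path checks described above, with hypothetical argument and option names; path_type=pathlib.Path converts the validated value before it reaches the callback.

import pathlib
import click

@click.command()
@click.argument(
    "config",
    type=click.Path(exists=True, dir_okay=False, readable=True, path_type=pathlib.Path),
)
@click.option("--out", type=click.Path(file_okay=False, writable=True), default=".")
def build(config, out):
    # config is a pathlib.Path; out stays a plain str because no path_type was given.
    click.echo(f"building from {config} into {out}")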
+#: +#: For path related uses the :class:`Path` type is a better choice but +#: there are situations where an unprocessed type is useful which is why +#: it is is provided. +#: +#: .. versionadded:: 4.0 +UNPROCESSED = UnprocessedParamType() + +#: A unicode string parameter type which is the implicit default. This +#: can also be selected by using ``str`` as type. +STRING = StringParamType() + +#: An integer parameter. This can also be selected by using ``int`` as +#: type. +INT = IntParamType() + +#: A floating point value parameter. This can also be selected by using +#: ``float`` as type. +FLOAT = FloatParamType() + +#: A boolean parameter. This is the default for boolean flags. This can +#: also be selected by using ``bool`` as a type. +BOOL = BoolParamType() + +#: A UUID parameter. +UUID = UUIDParameterType() diff --git a/venv/lib/python3.8/site-packages/click/utils.py b/venv/lib/python3.8/site-packages/click/utils.py new file mode 100644 index 0000000..16033d6 --- /dev/null +++ b/venv/lib/python3.8/site-packages/click/utils.py @@ -0,0 +1,579 @@ +import os +import sys +import typing as t +from functools import update_wrapper +from types import ModuleType + +from ._compat import _default_text_stderr +from ._compat import _default_text_stdout +from ._compat import _find_binary_writer +from ._compat import auto_wrap_for_ansi +from ._compat import binary_streams +from ._compat import get_filesystem_encoding +from ._compat import open_stream +from ._compat import should_strip_ansi +from ._compat import strip_ansi +from ._compat import text_streams +from ._compat import WIN +from .globals import resolve_color_default + +if t.TYPE_CHECKING: + import typing_extensions as te + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + + +def _posixify(name: str) -> str: + return "-".join(name.split()).lower() + + +def safecall(func: F) -> F: + """Wraps a function so that it swallows exceptions.""" + + def wrapper(*args, **kwargs): # type: ignore + try: + return func(*args, **kwargs) + except Exception: + pass + + return update_wrapper(t.cast(F, wrapper), func) + + +def make_str(value: t.Any) -> str: + """Converts a value into a valid string.""" + if isinstance(value, bytes): + try: + return value.decode(get_filesystem_encoding()) + except UnicodeError: + return value.decode("utf-8", "replace") + return str(value) + + +def make_default_short_help(help: str, max_length: int = 45) -> str: + """Returns a condensed version of help string.""" + # Consider only the first paragraph. + paragraph_end = help.find("\n\n") + + if paragraph_end != -1: + help = help[:paragraph_end] + + # Collapse newlines, tabs, and spaces. + words = help.split() + + if not words: + return "" + + # The first paragraph started with a "no rewrap" marker, ignore it. + if words[0] == "\b": + words = words[1:] + + total_length = 0 + last_index = len(words) - 1 + + for i, word in enumerate(words): + total_length += len(word) + (i > 0) + + if total_length > max_length: # too long, truncate + break + + if word[-1] == ".": # sentence end, truncate without "..." + return " ".join(words[: i + 1]) + + if total_length == max_length and i != last_index: + break # not at sentence end, truncate with "..." + else: + return " ".join(words) # no truncation needed + + # Account for the length of the suffix. + total_length += len("...") + + # remove words until the length is short enough + while i > 0: + total_length -= len(words[i]) + (i > 0) + + if total_length <= max_length: + break + + i -= 1 + + return " ".join(words[:i]) + "..." 
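The module-level singletons above and convert_type() are what let plain Python types and defaults stand in for ParamType instances; a brief sketch with hypothetical options:

import click

@click.command()
@click.option("--size", type=(int, int), default=(800, 600))   # handled by Tuple(INT, INT)
@click.option("--scale", default=1.0)                          # float default infers FLOAT
@click.option("--token", type=click.UUID)                      # explicit UUID parameter type
def render(size, scale, token):
    width, height = size
    click.echo(f"{width}x{height} at {scale} for {token}")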
+ + +class LazyFile: + """A lazy file works like a regular file but it does not fully open + the file but it does perform some basic checks early to see if the + filename parameter does make sense. This is useful for safely opening + files for writing. + """ + + def __init__( + self, + filename: str, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, + ): + self.name = filename + self.mode = mode + self.encoding = encoding + self.errors = errors + self.atomic = atomic + self._f: t.Optional[t.IO] + + if filename == "-": + self._f, self.should_close = open_stream(filename, mode, encoding, errors) + else: + if "r" in mode: + # Open and close the file in case we're opening it for + # reading so that we can catch at least some errors in + # some cases early. + open(filename, mode).close() + self._f = None + self.should_close = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self.open(), name) + + def __repr__(self) -> str: + if self._f is not None: + return repr(self._f) + return f"" + + def open(self) -> t.IO: + """Opens the file if it's not yet open. This call might fail with + a :exc:`FileError`. Not handling this error will produce an error + that Click shows. + """ + if self._f is not None: + return self._f + try: + rv, self.should_close = open_stream( + self.name, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + except OSError as e: # noqa: E402 + from .exceptions import FileError + + raise FileError(self.name, hint=e.strerror) from e + self._f = rv + return rv + + def close(self) -> None: + """Closes the underlying file, no matter what.""" + if self._f is not None: + self._f.close() + + def close_intelligently(self) -> None: + """This function only closes the file if it was opened by the lazy + file wrapper. For instance this will never close stdin. + """ + if self.should_close: + self.close() + + def __enter__(self) -> "LazyFile": + return self + + def __exit__(self, exc_type, exc_value, tb): # type: ignore + self.close_intelligently() + + def __iter__(self) -> t.Iterator[t.AnyStr]: + self.open() + return iter(self._f) # type: ignore + + +class KeepOpenFile: + def __init__(self, file: t.IO) -> None: + self._file = file + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._file, name) + + def __enter__(self) -> "KeepOpenFile": + return self + + def __exit__(self, exc_type, exc_value, tb): # type: ignore + pass + + def __repr__(self) -> str: + return repr(self._file) + + def __iter__(self) -> t.Iterator[t.AnyStr]: + return iter(self._file) + + +def echo( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, +) -> None: + """Print a message and newline to stdout or a file. This should be + used instead of :func:`print` because it provides better support + for different data, files, and environments. + + Compared to :func:`print`, this does the following: + + - Ensures that the output encoding is not misconfigured on Linux. + - Supports Unicode in the Windows console. + - Supports writing to binary outputs, and supports writing bytes + to text outputs. + - Supports colors and styles on Windows. + - Removes ANSI color and style codes if the output does not look + like an interactive terminal. + - Always flushes the output. + + :param message: The string or bytes to output. Other objects are + converted to strings. + :param file: The file to write to. Defaults to ``stdout``. 
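LazyFile, defined above, can also be used directly; a small sketch assuming a hypothetical run.log path: in write mode nothing is opened until the first attribute access, and close_intelligently() never closes a wrapped standard stream.

from click.utils import LazyFile

log = LazyFile("run.log", "w")      # nothing is opened yet in "w" mode
log.write("started\n")              # first access opens the real file
log.close_intelligently()           # closed here because this wrapper opened it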
+ :param err: Write to ``stderr`` instead of ``stdout``. + :param nl: Print a newline after the message. Enabled by default. + :param color: Force showing or hiding colors and other styles. By + default Click will remove color if the output does not look like + an interactive terminal. + + .. versionchanged:: 6.0 + Support Unicode output on the Windows console. Click does not + modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` + will still not support Unicode. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionadded:: 3.0 + Added the ``err`` parameter. + + .. versionchanged:: 2.0 + Support colors on Windows if colorama is installed. + """ + if file is None: + if err: + file = _default_text_stderr() + else: + file = _default_text_stdout() + + # Convert non bytes/text into the native string type. + if message is not None and not isinstance(message, (str, bytes, bytearray)): + out: t.Optional[t.Union[str, bytes]] = str(message) + else: + out = message + + if nl: + out = out or "" + if isinstance(out, str): + out += "\n" + else: + out += b"\n" + + if not out: + file.flush() + return + + # If there is a message and the value looks like bytes, we manually + # need to find the binary stream and write the message in there. + # This is done separately so that most stream types will work as you + # would expect. Eg: you can write to StringIO for other cases. + if isinstance(out, (bytes, bytearray)): + binary_file = _find_binary_writer(file) + + if binary_file is not None: + file.flush() + binary_file.write(out) + binary_file.flush() + return + + # ANSI style code support. For no message or bytes, nothing happens. + # When outputting to a file instead of a terminal, strip codes. + else: + color = resolve_color_default(color) + + if should_strip_ansi(file, color): + out = strip_ansi(out) + elif WIN: + if auto_wrap_for_ansi is not None: + file = auto_wrap_for_ansi(file) # type: ignore + elif not color: + out = strip_ansi(out) + + file.write(out) # type: ignore + file.flush() + + +def get_binary_stream(name: "te.Literal['stdin', 'stdout', 'stderr']") -> t.BinaryIO: + """Returns a system stream for byte processing. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + """ + opener = binary_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener() + + +def get_text_stream( + name: "te.Literal['stdin', 'stdout', 'stderr']", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", +) -> t.TextIO: + """Returns a system stream for text processing. This usually returns + a wrapped stream around a binary stream returned from + :func:`get_binary_stream` but it also can take shortcuts for already + correctly configured streams. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + :param encoding: overrides the detected default encoding. + :param errors: overrides the default error mode. + """ + opener = text_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener(encoding, errors) + + +def open_file( + filename: str, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: bool = False, + atomic: bool = False, +) -> t.IO: + """This is similar to how the :class:`File` works but for manual + usage. Files are opened non lazy by default. 
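A brief sketch of echo() and the stream helpers documented above: the same call handles text, bytes, and styled output, and ANSI codes are stripped when stdout does not look like a terminal.

import click

click.echo("plain text")
click.echo(b"raw bytes go to the underlying binary stream")
click.echo(click.style("warning", fg="yellow"), err=True)   # styled, written to stderr

stdout = click.get_binary_stream("stdout")   # byte-level access to stdout
stdout.write(b"binary payload\n")
stdout.flush()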
This can open regular + files as well as stdin/stdout if ``'-'`` is passed. + + If stdin/stdout is returned the stream is wrapped so that the context + manager will not close the stream accidentally. This makes it possible + to always use the function like this without having to worry to + accidentally close a standard stream:: + + with open_file(filename) as f: + ... + + .. versionadded:: 3.0 + + :param filename: the name of the file to open (or ``'-'`` for stdin/stdout). + :param mode: the mode in which to open the file. + :param encoding: the encoding to use. + :param errors: the error handling for this file. + :param lazy: can be flipped to true to open the file lazily. + :param atomic: in atomic mode writes go into a temporary file and it's + moved on close. + """ + if lazy: + return t.cast(t.IO, LazyFile(filename, mode, encoding, errors, atomic=atomic)) + f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) + if not should_close: + f = t.cast(t.IO, KeepOpenFile(f)) + return f + + +def get_os_args() -> t.Sequence[str]: + """Returns the argument part of ``sys.argv``, removing the first + value which is the name of the script. + + .. deprecated:: 8.0 + Will be removed in Click 8.1. Access ``sys.argv[1:]`` directly + instead. + """ + import warnings + + warnings.warn( + "'get_os_args' is deprecated and will be removed in Click 8.1." + " Access 'sys.argv[1:]' directly instead.", + DeprecationWarning, + stacklevel=2, + ) + return sys.argv[1:] + + +def format_filename( + filename: t.Union[str, bytes, os.PathLike], shorten: bool = False +) -> str: + """Formats a filename for user display. The main purpose of this + function is to ensure that the filename can be displayed at all. This + will decode the filename to unicode if necessary in a way that it will + not fail. Optionally, it can shorten the filename to not include the + full path to the filename. + + :param filename: formats a filename for UI display. This will also convert + the filename into unicode without failing. + :param shorten: this optionally shortens the filename to strip of the + path that leads up to it. + """ + if shorten: + filename = os.path.basename(filename) + + return os.fsdecode(filename) + + +def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str: + r"""Returns the config folder for the application. The default behavior + is to return whatever is most appropriate for the operating system. + + To give you an idea, for an app called ``"Foo Bar"``, something like + the following folders could be returned: + + Mac OS X: + ``~/Library/Application Support/Foo Bar`` + Mac OS X (POSIX): + ``~/.foo-bar`` + Unix: + ``~/.config/foo-bar`` + Unix (POSIX): + ``~/.foo-bar`` + Windows (roaming): + ``C:\Users\\AppData\Roaming\Foo Bar`` + Windows (not roaming): + ``C:\Users\\AppData\Local\Foo Bar`` + + .. versionadded:: 2.0 + + :param app_name: the application name. This should be properly capitalized + and can contain whitespace. + :param roaming: controls if the folder should be roaming or not on Windows. + Has no affect otherwise. + :param force_posix: if this is set to `True` then on any POSIX system the + folder will be stored in the home folder with a leading + dot instead of the XDG config home or darwin's + application support folder. 
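To illustrate open_file() and get_app_dir() as documented above (the application name and file names are hypothetical):

import os
import click

# "-" maps to stdout here; the wrapper keeps the context manager from
# closing the real standard stream.
with click.open_file("-", "w") as f:
    f.write("hello from open_file\n")

# Platform-appropriate per-user config directory for a hypothetical app.
config_dir = click.get_app_dir("My Tool")
config_path = os.path.join(config_dir, "settings.ini")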
+ """ + if WIN: + key = "APPDATA" if roaming else "LOCALAPPDATA" + folder = os.environ.get(key) + if folder is None: + folder = os.path.expanduser("~") + return os.path.join(folder, app_name) + if force_posix: + return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}")) + if sys.platform == "darwin": + return os.path.join( + os.path.expanduser("~/Library/Application Support"), app_name + ) + return os.path.join( + os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), + _posixify(app_name), + ) + + +class PacifyFlushWrapper: + """This wrapper is used to catch and suppress BrokenPipeErrors resulting + from ``.flush()`` being called on broken pipe during the shutdown/final-GC + of the Python interpreter. Notably ``.flush()`` is always called on + ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any + other cleanup code, and the case where the underlying file is not a broken + pipe, all calls and attributes are proxied. + """ + + def __init__(self, wrapped: t.IO) -> None: + self.wrapped = wrapped + + def flush(self) -> None: + try: + self.wrapped.flush() + except OSError as e: + import errno + + if e.errno != errno.EPIPE: + raise + + def __getattr__(self, attr: str) -> t.Any: + return getattr(self.wrapped, attr) + + +def _detect_program_name( + path: t.Optional[str] = None, _main: ModuleType = sys.modules["__main__"] +) -> str: + """Determine the command used to run the program, for use in help + text. If a file or entry point was executed, the file name is + returned. If ``python -m`` was used to execute a module or package, + ``python -m name`` is returned. + + This doesn't try to be too precise, the goal is to give a concise + name for help text. Files are only shown as their name without the + path. ``python`` is only shown for modules, and the full path to + ``sys.executable`` is not shown. + + :param path: The Python file being executed. Python puts this in + ``sys.argv[0]``, which is used by default. + :param _main: The ``__main__`` module. This should only be passed + during internal testing. + + .. versionadded:: 8.0 + Based on command args detection in the Werkzeug reloader. + + :meta private: + """ + if not path: + path = sys.argv[0] + + # The value of __package__ indicates how Python was called. It may + # not exist if a setuptools script is installed as an egg. It may be + # set incorrectly for entry points created with pip on Windows. + if getattr(_main, "__package__", None) is None or ( + os.name == "nt" + and _main.__package__ == "" + and not os.path.exists(path) + and os.path.exists(f"{path}.exe") + ): + # Executed a file, like "python app.py". + return os.path.basename(path) + + # Executed a module, like "python -m example". + # Rewritten by Python from "-m script" to "/path/to/script.py". + # Need to look at main module to determine how it was executed. + py_module = t.cast(str, _main.__package__) + name = os.path.splitext(os.path.basename(path))[0] + + # A submodule like "example.cli". + if name != "__main__": + py_module = f"{py_module}.{name}" + + return f"python -m {py_module.lstrip('.')}" + + +def _expand_args( + args: t.Iterable[str], + *, + user: bool = True, + env: bool = True, + glob_recursive: bool = True, +) -> t.List[str]: + """Simulate Unix shell expansion with Python functions. + + See :func:`glob.glob`, :func:`os.path.expanduser`, and + :func:`os.path.expandvars`. + + This intended for use on Windows, where the shell does not do any + expansion. It may not exactly match what a Unix shell would do. 
+ + :param args: List of command line arguments to expand. + :param user: Expand user home directory. + :param env: Expand environment variables. + :param glob_recursive: ``**`` matches directories recursively. + + .. versionadded:: 8.0 + + :meta private: + """ + from glob import glob + + out = [] + + for arg in args: + if user: + arg = os.path.expanduser(arg) + + if env: + arg = os.path.expandvars(arg) + + matches = glob(arg, recursive=glob_recursive) + + if not matches: + out.append(arg) + else: + out.extend(matches) + + return out diff --git a/venv/lib/python3.8/site-packages/easy_install.py b/venv/lib/python3.8/site-packages/easy_install.py new file mode 100644 index 0000000..d87e984 --- /dev/null +++ b/venv/lib/python3.8/site-packages/easy_install.py @@ -0,0 +1,5 @@ +"""Run the EasyInstall command""" + +if __name__ == '__main__': + from setuptools.command.easy_install import main + main() diff --git a/venv/lib/python3.8/site-packages/flask/__init__.py b/venv/lib/python3.8/site-packages/flask/__init__.py new file mode 100644 index 0000000..43b5468 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/__init__.py @@ -0,0 +1,46 @@ +from markupsafe import escape +from markupsafe import Markup +from werkzeug.exceptions import abort as abort +from werkzeug.utils import redirect as redirect + +from . import json as json +from .app import Flask as Flask +from .app import Request as Request +from .app import Response as Response +from .blueprints import Blueprint as Blueprint +from .config import Config as Config +from .ctx import after_this_request as after_this_request +from .ctx import copy_current_request_context as copy_current_request_context +from .ctx import has_app_context as has_app_context +from .ctx import has_request_context as has_request_context +from .globals import _app_ctx_stack as _app_ctx_stack +from .globals import _request_ctx_stack as _request_ctx_stack +from .globals import current_app as current_app +from .globals import g as g +from .globals import request as request +from .globals import session as session +from .helpers import flash as flash +from .helpers import get_flashed_messages as get_flashed_messages +from .helpers import get_template_attribute as get_template_attribute +from .helpers import make_response as make_response +from .helpers import safe_join as safe_join +from .helpers import send_file as send_file +from .helpers import send_from_directory as send_from_directory +from .helpers import stream_with_context as stream_with_context +from .helpers import url_for as url_for +from .json import jsonify as jsonify +from .signals import appcontext_popped as appcontext_popped +from .signals import appcontext_pushed as appcontext_pushed +from .signals import appcontext_tearing_down as appcontext_tearing_down +from .signals import before_render_template as before_render_template +from .signals import got_request_exception as got_request_exception +from .signals import message_flashed as message_flashed +from .signals import request_finished as request_finished +from .signals import request_started as request_started +from .signals import request_tearing_down as request_tearing_down +from .signals import signals_available as signals_available +from .signals import template_rendered as template_rendered +from .templating import render_template as render_template +from .templating import render_template_string as render_template_string + +__version__ = "2.0.2" diff --git a/venv/lib/python3.8/site-packages/flask/__main__.py 
b/venv/lib/python3.8/site-packages/flask/__main__.py new file mode 100644 index 0000000..4e28416 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/__main__.py @@ -0,0 +1,3 @@ +from .cli import main + +main() diff --git a/venv/lib/python3.8/site-packages/flask/app.py b/venv/lib/python3.8/site-packages/flask/app.py new file mode 100644 index 0000000..23b99e2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/app.py @@ -0,0 +1,2091 @@ +import functools +import inspect +import logging +import os +import sys +import typing as t +import weakref +from datetime import timedelta +from itertools import chain +from threading import Lock +from types import TracebackType + +from werkzeug.datastructures import Headers +from werkzeug.datastructures import ImmutableDict +from werkzeug.exceptions import BadRequest +from werkzeug.exceptions import BadRequestKeyError +from werkzeug.exceptions import HTTPException +from werkzeug.exceptions import InternalServerError +from werkzeug.local import ContextVar +from werkzeug.routing import BuildError +from werkzeug.routing import Map +from werkzeug.routing import MapAdapter +from werkzeug.routing import RequestRedirect +from werkzeug.routing import RoutingException +from werkzeug.routing import Rule +from werkzeug.wrappers import Response as BaseResponse + +from . import cli +from . import json +from .config import Config +from .config import ConfigAttribute +from .ctx import _AppCtxGlobals +from .ctx import AppContext +from .ctx import RequestContext +from .globals import _request_ctx_stack +from .globals import g +from .globals import request +from .globals import session +from .helpers import _split_blueprint_path +from .helpers import get_debug_flag +from .helpers import get_env +from .helpers import get_flashed_messages +from .helpers import get_load_dotenv +from .helpers import locked_cached_property +from .helpers import url_for +from .json import jsonify +from .logging import create_logger +from .scaffold import _endpoint_from_view_func +from .scaffold import _sentinel +from .scaffold import find_package +from .scaffold import Scaffold +from .scaffold import setupmethod +from .sessions import SecureCookieSessionInterface +from .signals import appcontext_tearing_down +from .signals import got_request_exception +from .signals import request_finished +from .signals import request_started +from .signals import request_tearing_down +from .templating import DispatchingJinjaLoader +from .templating import Environment +from .typing import BeforeFirstRequestCallable +from .typing import ResponseReturnValue +from .typing import TeardownCallable +from .typing import TemplateFilterCallable +from .typing import TemplateGlobalCallable +from .typing import TemplateTestCallable +from .wrappers import Request +from .wrappers import Response + +if t.TYPE_CHECKING: + import typing_extensions as te + from .blueprints import Blueprint + from .testing import FlaskClient + from .testing import FlaskCliRunner + from .typing import ErrorHandlerCallable + +if sys.version_info >= (3, 8): + iscoroutinefunction = inspect.iscoroutinefunction +else: + + def iscoroutinefunction(func: t.Any) -> bool: + while inspect.ismethod(func): + func = func.__func__ + + while isinstance(func, functools.partial): + func = func.func + + return inspect.iscoroutinefunction(func) + + +def _make_timedelta(value: t.Optional[timedelta]) -> t.Optional[timedelta]: + if value is None or isinstance(value, timedelta): + return value + + return timedelta(seconds=value) + + +class Flask(Scaffold): + 
"""The flask object implements a WSGI application and acts as the central + object. It is passed the name of the module or package of the + application. Once it is created it will act as a central registry for + the view functions, the URL rules, template configuration and much more. + + The name of the package is used to resolve resources from inside the + package or the folder the module is contained in depending on if the + package parameter resolves to an actual python package (a folder with + an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). + + For more information about resource loading, see :func:`open_resource`. + + Usually you create a :class:`Flask` instance in your main module or + in the :file:`__init__.py` file of your package like this:: + + from flask import Flask + app = Flask(__name__) + + .. admonition:: About the First Parameter + + The idea of the first parameter is to give Flask an idea of what + belongs to your application. This name is used to find resources + on the filesystem, can be used by extensions to improve debugging + information and a lot more. + + So it's important what you provide there. If you are using a single + module, `__name__` is always the correct value. If you however are + using a package, it's usually recommended to hardcode the name of + your package there. + + For example if your application is defined in :file:`yourapplication/app.py` + you should create it with one of the two versions below:: + + app = Flask('yourapplication') + app = Flask(__name__.split('.')[0]) + + Why is that? The application will work even with `__name__`, thanks + to how resources are looked up. However it will make debugging more + painful. Certain extensions can make assumptions based on the + import name of your application. For example the Flask-SQLAlchemy + extension will look for the code in your application that triggered + an SQL query in debug mode. If the import name is not properly set + up, that debugging information is lost. (For example it would only + pick up SQL queries in `yourapplication.app` and not + `yourapplication.views.frontend`) + + .. versionadded:: 0.7 + The `static_url_path`, `static_folder`, and `template_folder` + parameters were added. + + .. versionadded:: 0.8 + The `instance_path` and `instance_relative_config` parameters were + added. + + .. versionadded:: 0.11 + The `root_path` parameter was added. + + .. versionadded:: 1.0 + The ``host_matching`` and ``static_host`` parameters were added. + + .. versionadded:: 1.0 + The ``subdomain_matching`` parameter was added. Subdomain + matching needs to be enabled manually now. Setting + :data:`SERVER_NAME` does not implicitly enable it. + + :param import_name: the name of the application package + :param static_url_path: can be used to specify a different path for the + static files on the web. Defaults to the name + of the `static_folder` folder. + :param static_folder: The folder with static files that is served at + ``static_url_path``. Relative to the application ``root_path`` + or an absolute path. Defaults to ``'static'``. + :param static_host: the host to use when adding the static route. + Defaults to None. Required when using ``host_matching=True`` + with a ``static_folder`` configured. + :param host_matching: set ``url_map.host_matching`` attribute. + Defaults to False. + :param subdomain_matching: consider the subdomain relative to + :data:`SERVER_NAME` when matching routes. Defaults to False. 
+ :param template_folder: the folder that contains the templates that should + be used by the application. Defaults to + ``'templates'`` folder in the root path of the + application. + :param instance_path: An alternative instance path for the application. + By default the folder ``'instance'`` next to the + package or module is assumed to be the instance + path. + :param instance_relative_config: if set to ``True`` relative filenames + for loading the config are assumed to + be relative to the instance path instead + of the application root. + :param root_path: The path to the root of the application files. + This should only be set manually when it can't be detected + automatically, such as for namespace packages. + """ + + #: The class that is used for request objects. See :class:`~flask.Request` + #: for more information. + request_class = Request + + #: The class that is used for response objects. See + #: :class:`~flask.Response` for more information. + response_class = Response + + #: The class that is used for the Jinja environment. + #: + #: .. versionadded:: 0.11 + jinja_environment = Environment + + #: The class that is used for the :data:`~flask.g` instance. + #: + #: Example use cases for a custom class: + #: + #: 1. Store arbitrary attributes on flask.g. + #: 2. Add a property for lazy per-request database connectors. + #: 3. Return None instead of AttributeError on unexpected attributes. + #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. + #: + #: In Flask 0.9 this property was called `request_globals_class` but it + #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the + #: flask.g object is now application context scoped. + #: + #: .. versionadded:: 0.10 + app_ctx_globals_class = _AppCtxGlobals + + #: The class that is used for the ``config`` attribute of this app. + #: Defaults to :class:`~flask.Config`. + #: + #: Example use cases for a custom class: + #: + #: 1. Default values for certain config options. + #: 2. Access to config values through attributes in addition to keys. + #: + #: .. versionadded:: 0.11 + config_class = Config + + #: The testing flag. Set this to ``True`` to enable the test mode of + #: Flask extensions (and in the future probably also Flask itself). + #: For example this might activate test helpers that have an + #: additional runtime cost which should not be enabled by default. + #: + #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the + #: default it's implicitly enabled. + #: + #: This attribute can also be configured from the config with the + #: ``TESTING`` configuration key. Defaults to ``False``. + testing = ConfigAttribute("TESTING") + + #: If a secret key is set, cryptographic components can use this to + #: sign cookies and other things. Set this to a complex random value + #: when you want to use the secure cookie for instance. + #: + #: This attribute can also be configured from the config with the + #: :data:`SECRET_KEY` configuration key. Defaults to ``None``. + secret_key = ConfigAttribute("SECRET_KEY") + + #: The secure cookie uses this for the name of the session cookie. + #: + #: This attribute can also be configured from the config with the + #: ``SESSION_COOKIE_NAME`` configuration key. Defaults to ``'session'`` + session_cookie_name = ConfigAttribute("SESSION_COOKIE_NAME") + + #: A :class:`~datetime.timedelta` which is used to set the expiration + #: date of a permanent session. The default is 31 days which makes a + #: permanent session survive for roughly one month. 
+ #: + #: This attribute can also be configured from the config with the + #: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to + #: ``timedelta(days=31)`` + permanent_session_lifetime = ConfigAttribute( + "PERMANENT_SESSION_LIFETIME", get_converter=_make_timedelta + ) + + #: A :class:`~datetime.timedelta` or number of seconds which is used + #: as the default ``max_age`` for :func:`send_file`. The default is + #: ``None``, which tells the browser to use conditional requests + #: instead of a timed cache. + #: + #: Configured with the :data:`SEND_FILE_MAX_AGE_DEFAULT` + #: configuration key. + #: + #: .. versionchanged:: 2.0 + #: Defaults to ``None`` instead of 12 hours. + send_file_max_age_default = ConfigAttribute( + "SEND_FILE_MAX_AGE_DEFAULT", get_converter=_make_timedelta + ) + + #: Enable this if you want to use the X-Sendfile feature. Keep in + #: mind that the server has to support this. This only affects files + #: sent with the :func:`send_file` method. + #: + #: .. versionadded:: 0.2 + #: + #: This attribute can also be configured from the config with the + #: ``USE_X_SENDFILE`` configuration key. Defaults to ``False``. + use_x_sendfile = ConfigAttribute("USE_X_SENDFILE") + + #: The JSON encoder class to use. Defaults to :class:`~flask.json.JSONEncoder`. + #: + #: .. versionadded:: 0.10 + json_encoder = json.JSONEncoder + + #: The JSON decoder class to use. Defaults to :class:`~flask.json.JSONDecoder`. + #: + #: .. versionadded:: 0.10 + json_decoder = json.JSONDecoder + + #: Options that are passed to the Jinja environment in + #: :meth:`create_jinja_environment`. Changing these options after + #: the environment is created (accessing :attr:`jinja_env`) will + #: have no effect. + #: + #: .. versionchanged:: 1.1.0 + #: This is a ``dict`` instead of an ``ImmutableDict`` to allow + #: easier configuration. + #: + jinja_options: dict = {} + + #: Default configuration parameters. + default_config = ImmutableDict( + { + "ENV": None, + "DEBUG": None, + "TESTING": False, + "PROPAGATE_EXCEPTIONS": None, + "PRESERVE_CONTEXT_ON_EXCEPTION": None, + "SECRET_KEY": None, + "PERMANENT_SESSION_LIFETIME": timedelta(days=31), + "USE_X_SENDFILE": False, + "SERVER_NAME": None, + "APPLICATION_ROOT": "/", + "SESSION_COOKIE_NAME": "session", + "SESSION_COOKIE_DOMAIN": None, + "SESSION_COOKIE_PATH": None, + "SESSION_COOKIE_HTTPONLY": True, + "SESSION_COOKIE_SECURE": False, + "SESSION_COOKIE_SAMESITE": None, + "SESSION_REFRESH_EACH_REQUEST": True, + "MAX_CONTENT_LENGTH": None, + "SEND_FILE_MAX_AGE_DEFAULT": None, + "TRAP_BAD_REQUEST_ERRORS": None, + "TRAP_HTTP_EXCEPTIONS": False, + "EXPLAIN_TEMPLATE_LOADING": False, + "PREFERRED_URL_SCHEME": "http", + "JSON_AS_ASCII": True, + "JSON_SORT_KEYS": True, + "JSONIFY_PRETTYPRINT_REGULAR": False, + "JSONIFY_MIMETYPE": "application/json", + "TEMPLATES_AUTO_RELOAD": None, + "MAX_COOKIE_SIZE": 4093, + } + ) + + #: The rule object to use for URL rules created. This is used by + #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. + #: + #: .. versionadded:: 0.7 + url_rule_class = Rule + + #: The map object to use for storing the URL rules and routing + #: configuration parameters. Defaults to :class:`werkzeug.routing.Map`. + #: + #: .. versionadded:: 1.1.0 + url_map_class = Map + + #: The :meth:`test_client` method creates an instance of this test + #: client class. Defaults to :class:`~flask.testing.FlaskClient`. + #: + #: .. 
versionadded:: 0.7 + test_client_class: t.Optional[t.Type["FlaskClient"]] = None + + #: The :class:`~click.testing.CliRunner` subclass, by default + #: :class:`~flask.testing.FlaskCliRunner` that is used by + #: :meth:`test_cli_runner`. Its ``__init__`` method should take a + #: Flask app object as the first argument. + #: + #: .. versionadded:: 1.0 + test_cli_runner_class: t.Optional[t.Type["FlaskCliRunner"]] = None + + #: the session interface to use. By default an instance of + #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. + #: + #: .. versionadded:: 0.8 + session_interface = SecureCookieSessionInterface() + + def __init__( + self, + import_name: str, + static_url_path: t.Optional[str] = None, + static_folder: t.Optional[t.Union[str, os.PathLike]] = "static", + static_host: t.Optional[str] = None, + host_matching: bool = False, + subdomain_matching: bool = False, + template_folder: t.Optional[str] = "templates", + instance_path: t.Optional[str] = None, + instance_relative_config: bool = False, + root_path: t.Optional[str] = None, + ): + super().__init__( + import_name=import_name, + static_folder=static_folder, + static_url_path=static_url_path, + template_folder=template_folder, + root_path=root_path, + ) + + if instance_path is None: + instance_path = self.auto_find_instance_path() + elif not os.path.isabs(instance_path): + raise ValueError( + "If an instance path is provided it must be absolute." + " A relative path was given instead." + ) + + #: Holds the path to the instance folder. + #: + #: .. versionadded:: 0.8 + self.instance_path = instance_path + + #: The configuration dictionary as :class:`Config`. This behaves + #: exactly like a regular dictionary but supports additional methods + #: to load a config from files. + self.config = self.make_config(instance_relative_config) + + #: A list of functions that are called when :meth:`url_for` raises a + #: :exc:`~werkzeug.routing.BuildError`. Each function registered here + #: is called with `error`, `endpoint` and `values`. If a function + #: returns ``None`` or raises a :exc:`BuildError` the next function is + #: tried. + #: + #: .. versionadded:: 0.9 + self.url_build_error_handlers: t.List[ + t.Callable[[Exception, str, dict], str] + ] = [] + + #: A list of functions that will be called at the beginning of the + #: first request to this instance. To register a function, use the + #: :meth:`before_first_request` decorator. + #: + #: .. versionadded:: 0.8 + self.before_first_request_funcs: t.List[BeforeFirstRequestCallable] = [] + + #: A list of functions that are called when the application context + #: is destroyed. Since the application context is also torn down + #: if the request ends this is the place to store code that disconnects + #: from databases. + #: + #: .. versionadded:: 0.9 + self.teardown_appcontext_funcs: t.List[TeardownCallable] = [] + + #: A list of shell context processor functions that should be run + #: when a shell context is created. + #: + #: .. versionadded:: 0.11 + self.shell_context_processors: t.List[t.Callable[[], t.Dict[str, t.Any]]] = [] + + #: Maps registered blueprint names to blueprint objects. The + #: dict retains the order the blueprints were registered in. + #: Blueprints can be registered multiple times, this dict does + #: not track how often they were attached. + #: + #: .. versionadded:: 0.7 + self.blueprints: t.Dict[str, "Blueprint"] = {} + + #: a place where extensions can store application specific state. 
For + #: example this is where an extension could store database engines and + #: similar things. + #: + #: The key must match the name of the extension module. For example in + #: case of a "Flask-Foo" extension in `flask_foo`, the key would be + #: ``'foo'``. + #: + #: .. versionadded:: 0.7 + self.extensions: dict = {} + + #: The :class:`~werkzeug.routing.Map` for this instance. You can use + #: this to change the routing converters after the class was created + #: but before any routes are connected. Example:: + #: + #: from werkzeug.routing import BaseConverter + #: + #: class ListConverter(BaseConverter): + #: def to_python(self, value): + #: return value.split(',') + #: def to_url(self, values): + #: return ','.join(super(ListConverter, self).to_url(value) + #: for value in values) + #: + #: app = Flask(__name__) + #: app.url_map.converters['list'] = ListConverter + self.url_map = self.url_map_class() + + self.url_map.host_matching = host_matching + self.subdomain_matching = subdomain_matching + + # tracks internally if the application already handled at least one + # request. + self._got_first_request = False + self._before_request_lock = Lock() + + # Add a static route using the provided static_url_path, static_host, + # and static_folder if there is a configured static_folder. + # Note we do this without checking if static_folder exists. + # For one, it might be created while the server is running (e.g. during + # development). Also, Google App Engine stores static files somewhere + if self.has_static_folder: + assert ( + bool(static_host) == host_matching + ), "Invalid static_host/host_matching combination" + # Use a weakref to avoid creating a reference cycle between the app + # and the view function (see #3761). + self_ref = weakref.ref(self) + self.add_url_rule( + f"{self.static_url_path}/", + endpoint="static", + host=static_host, + view_func=lambda **kw: self_ref().send_static_file(**kw), # type: ignore # noqa: B950 + ) + + # Set the name of the Click group in case someone wants to add + # the app's commands to another CLI tool. + self.cli.name = self.name + + def _is_setup_finished(self) -> bool: + return self.debug and self._got_first_request + + @locked_cached_property + def name(self) -> str: # type: ignore + """The name of the application. This is usually the import name + with the difference that it's guessed from the run file if the + import name is main. This name is used as a display name when + Flask needs the name of the application. It can be set and overridden + to change the value. + + .. versionadded:: 0.8 + """ + if self.import_name == "__main__": + fn = getattr(sys.modules["__main__"], "__file__", None) + if fn is None: + return "__main__" + return os.path.splitext(os.path.basename(fn))[0] + return self.import_name + + @property + def propagate_exceptions(self) -> bool: + """Returns the value of the ``PROPAGATE_EXCEPTIONS`` configuration + value in case it's set, otherwise a sensible default is returned. + + .. versionadded:: 0.7 + """ + rv = self.config["PROPAGATE_EXCEPTIONS"] + if rv is not None: + return rv + return self.testing or self.debug + + @property + def preserve_context_on_exception(self) -> bool: + """Returns the value of the ``PRESERVE_CONTEXT_ON_EXCEPTION`` + configuration value in case it's set, otherwise a sensible default + is returned. + + .. 
versionadded:: 0.7 + """ + rv = self.config["PRESERVE_CONTEXT_ON_EXCEPTION"] + if rv is not None: + return rv + return self.debug + + @locked_cached_property + def logger(self) -> logging.Logger: + """A standard Python :class:`~logging.Logger` for the app, with + the same name as :attr:`name`. + + In debug mode, the logger's :attr:`~logging.Logger.level` will + be set to :data:`~logging.DEBUG`. + + If there are no handlers configured, a default handler will be + added. See :doc:`/logging` for more information. + + .. versionchanged:: 1.1.0 + The logger takes the same name as :attr:`name` rather than + hard-coding ``"flask.app"``. + + .. versionchanged:: 1.0.0 + Behavior was simplified. The logger is always named + ``"flask.app"``. The level is only set during configuration, + it doesn't check ``app.debug`` each time. Only one format is + used, not different ones depending on ``app.debug``. No + handlers are removed, and a handler is only added if no + handlers are already configured. + + .. versionadded:: 0.3 + """ + return create_logger(self) + + @locked_cached_property + def jinja_env(self) -> Environment: + """The Jinja environment used to load templates. + + The environment is created the first time this property is + accessed. Changing :attr:`jinja_options` after that will have no + effect. + """ + return self.create_jinja_environment() + + @property + def got_first_request(self) -> bool: + """This attribute is set to ``True`` if the application started + handling the first request. + + .. versionadded:: 0.8 + """ + return self._got_first_request + + def make_config(self, instance_relative: bool = False) -> Config: + """Used to create the config attribute by the Flask constructor. + The `instance_relative` parameter is passed in from the constructor + of Flask (there named `instance_relative_config`) and indicates if + the config should be relative to the instance path or the root path + of the application. + + .. versionadded:: 0.8 + """ + root_path = self.root_path + if instance_relative: + root_path = self.instance_path + defaults = dict(self.default_config) + defaults["ENV"] = get_env() + defaults["DEBUG"] = get_debug_flag() + return self.config_class(root_path, defaults) + + def auto_find_instance_path(self) -> str: + """Tries to locate the instance path if it was not provided to the + constructor of the application class. It will basically calculate + the path to a folder named ``instance`` next to your main file or + the package. + + .. versionadded:: 0.8 + """ + prefix, package_path = find_package(self.import_name) + if prefix is None: + return os.path.join(package_path, "instance") + return os.path.join(prefix, "var", f"{self.name}-instance") + + def open_instance_resource(self, resource: str, mode: str = "rb") -> t.IO[t.AnyStr]: + """Opens a resource from the application's instance folder + (:attr:`instance_path`). Otherwise works like + :meth:`open_resource`. Instance resources can also be opened for + writing. + + :param resource: the name of the resource. To access resources within + subfolders use forward slashes as separator. + :param mode: resource file opening mode, default is 'rb'. + """ + return open(os.path.join(self.instance_path, resource), mode) + + @property + def templates_auto_reload(self) -> bool: + """Reload templates when they are changed. Used by + :meth:`create_jinja_environment`. + + This attribute can be configured with :data:`TEMPLATES_AUTO_RELOAD`. If + not set, it will be enabled in debug mode. + + .. 
versionadded:: 1.0 + This property was added but the underlying config and behavior + already existed. + """ + rv = self.config["TEMPLATES_AUTO_RELOAD"] + return rv if rv is not None else self.debug + + @templates_auto_reload.setter + def templates_auto_reload(self, value: bool) -> None: + self.config["TEMPLATES_AUTO_RELOAD"] = value + + def create_jinja_environment(self) -> Environment: + """Create the Jinja environment based on :attr:`jinja_options` + and the various Jinja-related methods of the app. Changing + :attr:`jinja_options` after this will have no effect. Also adds + Flask-related globals and filters to the environment. + + .. versionchanged:: 0.11 + ``Environment.auto_reload`` set in accordance with + ``TEMPLATES_AUTO_RELOAD`` configuration option. + + .. versionadded:: 0.5 + """ + options = dict(self.jinja_options) + + if "autoescape" not in options: + options["autoescape"] = self.select_jinja_autoescape + + if "auto_reload" not in options: + options["auto_reload"] = self.templates_auto_reload + + rv = self.jinja_environment(self, **options) + rv.globals.update( + url_for=url_for, + get_flashed_messages=get_flashed_messages, + config=self.config, + # request, session and g are normally added with the + # context processor for efficiency reasons but for imported + # templates we also want the proxies in there. + request=request, + session=session, + g=g, + ) + rv.policies["json.dumps_function"] = json.dumps + return rv + + def create_global_jinja_loader(self) -> DispatchingJinjaLoader: + """Creates the loader for the Jinja2 environment. Can be used to + override just the loader and keeping the rest unchanged. It's + discouraged to override this function. Instead one should override + the :meth:`jinja_loader` function instead. + + The global loader dispatches between the loaders of the application + and the individual blueprints. + + .. versionadded:: 0.7 + """ + return DispatchingJinjaLoader(self) + + def select_jinja_autoescape(self, filename: str) -> bool: + """Returns ``True`` if autoescaping should be active for the given + template name. If no template name is given, returns `True`. + + .. versionadded:: 0.5 + """ + if filename is None: + return True + return filename.endswith((".html", ".htm", ".xml", ".xhtml")) + + def update_template_context(self, context: dict) -> None: + """Update the template context with some commonly used variables. + This injects request, session, config and g into the template + context as well as everything template context processors want + to inject. Note that the as of Flask 0.6, the original values + in the context will not be overridden if a context processor + decides to return a value with the same key. + + :param context: the context as a dictionary that is updated in place + to add extra variables. + """ + names: t.Iterable[t.Optional[str]] = (None,) + + # A template may be rendered outside a request context. + if request: + names = chain(names, reversed(request.blueprints)) + + # The values passed to render_template take precedence. Keep a + # copy to re-apply after all context functions. + orig_ctx = context.copy() + + for name in names: + if name in self.template_context_processors: + for func in self.template_context_processors[name]: + context.update(func()) + + context.update(orig_ctx) + + def make_shell_context(self) -> dict: + """Returns the shell context for an interactive shell for this + application. This runs all the registered shell context + processors. + + .. 
versionadded:: 0.11 + """ + rv = {"app": self, "g": g} + for processor in self.shell_context_processors: + rv.update(processor()) + return rv + + #: What environment the app is running in. Flask and extensions may + #: enable behaviors based on the environment, such as enabling debug + #: mode. This maps to the :data:`ENV` config key. This is set by the + #: :envvar:`FLASK_ENV` environment variable and may not behave as + #: expected if set in code. + #: + #: **Do not enable development when deploying in production.** + #: + #: Default: ``'production'`` + env = ConfigAttribute("ENV") + + @property + def debug(self) -> bool: + """Whether debug mode is enabled. When using ``flask run`` to start + the development server, an interactive debugger will be shown for + unhandled exceptions, and the server will be reloaded when code + changes. This maps to the :data:`DEBUG` config key. This is + enabled when :attr:`env` is ``'development'`` and is overridden + by the ``FLASK_DEBUG`` environment variable. It may not behave as + expected if set in code. + + **Do not enable debug mode when deploying in production.** + + Default: ``True`` if :attr:`env` is ``'development'``, or + ``False`` otherwise. + """ + return self.config["DEBUG"] + + @debug.setter + def debug(self, value: bool) -> None: + self.config["DEBUG"] = value + self.jinja_env.auto_reload = self.templates_auto_reload + + def run( + self, + host: t.Optional[str] = None, + port: t.Optional[int] = None, + debug: t.Optional[bool] = None, + load_dotenv: bool = True, + **options: t.Any, + ) -> None: + """Runs the application on a local development server. + + Do not use ``run()`` in a production setting. It is not intended to + meet security and performance requirements for a production server. + Instead, see :doc:`/deploying/index` for WSGI server recommendations. + + If the :attr:`debug` flag is set the server will automatically reload + for code changes and show a debugger in case an exception happened. + + If you want to run the application in debug mode, but disable the + code execution on the interactive debugger, you can pass + ``use_evalex=False`` as parameter. This will keep the debugger's + traceback screen active, but disable code execution. + + It is not recommended to use this function for development with + automatic reloading as this is badly supported. Instead you should + be using the :command:`flask` command line script's ``run`` support. + + .. admonition:: Keep in Mind + + Flask will suppress any server error with a generic error page + unless it is in debug mode. As such to enable just the + interactive debugger without the code reloading, you have to + invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. + Setting ``use_debugger`` to ``True`` without being in debug mode + won't catch any exceptions because there won't be any to + catch. + + :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to + have the server available externally as well. Defaults to + ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable + if present. + :param port: the port of the webserver. Defaults to ``5000`` or the + port defined in the ``SERVER_NAME`` config variable if present. + :param debug: if given, enable or disable debug mode. See + :attr:`debug`. + :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` + files to set environment variables. Will also change the working + directory to the directory containing the first file found. 
+ :param options: the options to be forwarded to the underlying Werkzeug + server. See :func:`werkzeug.serving.run_simple` for more + information. + + .. versionchanged:: 1.0 + If installed, python-dotenv will be used to load environment + variables from :file:`.env` and :file:`.flaskenv` files. + + If set, the :envvar:`FLASK_ENV` and :envvar:`FLASK_DEBUG` + environment variables will override :attr:`env` and + :attr:`debug`. + + Threaded mode is enabled by default. + + .. versionchanged:: 0.10 + The default port is now picked from the ``SERVER_NAME`` + variable. + """ + # Change this into a no-op if the server is invoked from the + # command line. Have a look at cli.py for more information. + if os.environ.get("FLASK_RUN_FROM_CLI") == "true": + from .debughelpers import explain_ignored_app_run + + explain_ignored_app_run() + return + + if get_load_dotenv(load_dotenv): + cli.load_dotenv() + + # if set, let env vars override previous values + if "FLASK_ENV" in os.environ: + self.env = get_env() + self.debug = get_debug_flag() + elif "FLASK_DEBUG" in os.environ: + self.debug = get_debug_flag() + + # debug passed to method overrides all other sources + if debug is not None: + self.debug = bool(debug) + + server_name = self.config.get("SERVER_NAME") + sn_host = sn_port = None + + if server_name: + sn_host, _, sn_port = server_name.partition(":") + + if not host: + if sn_host: + host = sn_host + else: + host = "127.0.0.1" + + if port or port == 0: + port = int(port) + elif sn_port: + port = int(sn_port) + else: + port = 5000 + + options.setdefault("use_reloader", self.debug) + options.setdefault("use_debugger", self.debug) + options.setdefault("threaded", True) + + cli.show_server_banner(self.env, self.debug, self.name, False) + + from werkzeug.serving import run_simple + + try: + run_simple(t.cast(str, host), port, self, **options) + finally: + # reset the first request information if the development server + # reset normally. This makes it possible to restart the server + # without reloader and that stuff from an interactive shell. + self._got_first_request = False + + def test_client(self, use_cookies: bool = True, **kwargs: t.Any) -> "FlaskClient": + """Creates a test client for this application. For information + about unit testing head over to :doc:`/testing`. + + Note that if you are testing for assertions or exceptions in your + application code, you must set ``app.testing = True`` in order for the + exceptions to propagate to the test client. Otherwise, the exception + will be handled by the application (not visible to the test client) and + the only indication of an AssertionError or other exception will be a + 500 status code response to the test client. See the :attr:`testing` + attribute. For example:: + + app.testing = True + client = app.test_client() + + The test client can be used in a ``with`` block to defer the closing down + of the context until the end of the ``with`` block. This is useful if + you want to access the context locals for testing:: + + with app.test_client() as c: + rv = c.get('/?vodka=42') + assert request.args['vodka'] == '42' + + Additionally, you may pass optional keyword arguments that will then + be passed to the application's :attr:`test_client_class` constructor. 
+ For example:: + + from flask.testing import FlaskClient + + class CustomClient(FlaskClient): + def __init__(self, *args, **kwargs): + self._authentication = kwargs.pop("authentication") + super(CustomClient,self).__init__( *args, **kwargs) + + app.test_client_class = CustomClient + client = app.test_client(authentication='Basic ....') + + See :class:`~flask.testing.FlaskClient` for more information. + + .. versionchanged:: 0.4 + added support for ``with`` block usage for the client. + + .. versionadded:: 0.7 + The `use_cookies` parameter was added as well as the ability + to override the client to be used by setting the + :attr:`test_client_class` attribute. + + .. versionchanged:: 0.11 + Added `**kwargs` to support passing additional keyword arguments to + the constructor of :attr:`test_client_class`. + """ + cls = self.test_client_class + if cls is None: + from .testing import FlaskClient as cls # type: ignore + return cls( # type: ignore + self, self.response_class, use_cookies=use_cookies, **kwargs + ) + + def test_cli_runner(self, **kwargs: t.Any) -> "FlaskCliRunner": + """Create a CLI runner for testing CLI commands. + See :ref:`testing-cli`. + + Returns an instance of :attr:`test_cli_runner_class`, by default + :class:`~flask.testing.FlaskCliRunner`. The Flask app object is + passed as the first argument. + + .. versionadded:: 1.0 + """ + cls = self.test_cli_runner_class + + if cls is None: + from .testing import FlaskCliRunner as cls # type: ignore + + return cls(self, **kwargs) # type: ignore + + @setupmethod + def register_blueprint(self, blueprint: "Blueprint", **options: t.Any) -> None: + """Register a :class:`~flask.Blueprint` on the application. Keyword + arguments passed to this method will override the defaults set on the + blueprint. + + Calls the blueprint's :meth:`~flask.Blueprint.register` method after + recording the blueprint in the application's :attr:`blueprints`. + + :param blueprint: The blueprint to register. + :param url_prefix: Blueprint routes will be prefixed with this. + :param subdomain: Blueprint routes will match on this subdomain. + :param url_defaults: Blueprint routes will use these default values for + view arguments. + :param options: Additional keyword arguments are passed to + :class:`~flask.blueprints.BlueprintSetupState`. They can be + accessed in :meth:`~flask.Blueprint.record` callbacks. + + .. versionchanged:: 2.0.1 + The ``name`` option can be used to change the (pre-dotted) + name the blueprint is registered with. This allows the same + blueprint to be registered multiple times with unique names + for ``url_for``. + + .. versionadded:: 0.7 + """ + blueprint.register(self, options) + + def iter_blueprints(self) -> t.ValuesView["Blueprint"]: + """Iterates over all blueprints by the order they were registered. + + .. versionadded:: 0.11 + """ + return self.blueprints.values() + + @setupmethod + def add_url_rule( + self, + rule: str, + endpoint: t.Optional[str] = None, + view_func: t.Optional[t.Callable] = None, + provide_automatic_options: t.Optional[bool] = None, + **options: t.Any, + ) -> None: + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) # type: ignore + options["endpoint"] = endpoint + methods = options.pop("methods", None) + + # if the methods are not given and the view_func object knows its + # methods we can use that instead. If neither exists, we go with + # a tuple of only ``GET`` as default. 
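# A brief sketch of how this defaulting plays out in practice (the route and
# view names below are illustrative only): registering
#
#     @app.route("/ping")
#     def ping():
#         return "pong"
#
# is equivalent to app.add_url_rule("/ping", "ping", ping); with no explicit
# methods it matches GET, OPTIONS is answered automatically, and HEAD is
# supplied by Werkzeug's routing rule.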
+ if methods is None: + methods = getattr(view_func, "methods", None) or ("GET",) + if isinstance(methods, str): + raise TypeError( + "Allowed methods must be a list of strings, for" + ' example: @app.route(..., methods=["POST"])' + ) + methods = {item.upper() for item in methods} + + # Methods that should always be added + required_methods = set(getattr(view_func, "required_methods", ())) + + # starting with Flask 0.8 the view_func object can disable and + # force-enable the automatic options handling. + if provide_automatic_options is None: + provide_automatic_options = getattr( + view_func, "provide_automatic_options", None + ) + + if provide_automatic_options is None: + if "OPTIONS" not in methods: + provide_automatic_options = True + required_methods.add("OPTIONS") + else: + provide_automatic_options = False + + # Add the required methods now. + methods |= required_methods + + rule = self.url_rule_class(rule, methods=methods, **options) + rule.provide_automatic_options = provide_automatic_options # type: ignore + + self.url_map.add(rule) + if view_func is not None: + old_func = self.view_functions.get(endpoint) + if old_func is not None and old_func != view_func: + raise AssertionError( + "View function mapping is overwriting an existing" + f" endpoint function: {endpoint}" + ) + self.view_functions[endpoint] = view_func + + @setupmethod + def template_filter( + self, name: t.Optional[str] = None + ) -> t.Callable[[TemplateFilterCallable], TemplateFilterCallable]: + """A decorator that is used to register custom template filter. + You can specify a name for the filter, otherwise the function + name will be used. Example:: + + @app.template_filter() + def reverse(s): + return s[::-1] + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + + def decorator(f: TemplateFilterCallable) -> TemplateFilterCallable: + self.add_template_filter(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_filter( + self, f: TemplateFilterCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template filter. Works exactly like the + :meth:`template_filter` decorator. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + self.jinja_env.filters[name or f.__name__] = f + + @setupmethod + def template_test( + self, name: t.Optional[str] = None + ) -> t.Callable[[TemplateTestCallable], TemplateTestCallable]: + """A decorator that is used to register custom template test. + You can specify a name for the test, otherwise the function + name will be used. Example:: + + @app.template_test() + def is_prime(n): + if n == 2: + return True + for i in range(2, int(math.ceil(math.sqrt(n))) + 1): + if n % i == 0: + return False + return True + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def decorator(f: TemplateTestCallable) -> TemplateTestCallable: + self.add_template_test(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_test( + self, f: TemplateTestCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template test. Works exactly like the + :meth:`template_test` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. 
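# A short usage sketch tying the filter/test registration above to a template
# (names are illustrative): after the registrations shown in the docstrings,
# Jinja templates can use
#
#     {{ username|reverse }}
#     {% if 7 is is_prime %}lucky{% endif %}
#
# and app.add_template_test(is_prime, "prime") would expose the same test
# under the shorter name "prime".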
+ """ + self.jinja_env.tests[name or f.__name__] = f + + @setupmethod + def template_global( + self, name: t.Optional[str] = None + ) -> t.Callable[[TemplateGlobalCallable], TemplateGlobalCallable]: + """A decorator that is used to register a custom template global function. + You can specify a name for the global function, otherwise the function + name will be used. Example:: + + @app.template_global() + def double(n): + return 2 * n + + .. versionadded:: 0.10 + + :param name: the optional name of the global function, otherwise the + function name will be used. + """ + + def decorator(f: TemplateGlobalCallable) -> TemplateGlobalCallable: + self.add_template_global(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_global( + self, f: TemplateGlobalCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template global function. Works exactly like the + :meth:`template_global` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the global function, otherwise the + function name will be used. + """ + self.jinja_env.globals[name or f.__name__] = f + + @setupmethod + def before_first_request( + self, f: BeforeFirstRequestCallable + ) -> BeforeFirstRequestCallable: + """Registers a function to be run before the first request to this + instance of the application. + + The function will be called without any arguments and its return + value is ignored. + + .. versionadded:: 0.8 + """ + self.before_first_request_funcs.append(f) + return f + + @setupmethod + def teardown_appcontext(self, f: TeardownCallable) -> TeardownCallable: + """Registers a function to be called when the application context + ends. These functions are typically also called when the request + context is popped. + + Example:: + + ctx = app.app_context() + ctx.push() + ... + ctx.pop() + + When ``ctx.pop()`` is executed in the above example, the teardown + functions are called just before the app context moves from the + stack of active contexts. This becomes relevant if you are using + such constructs in tests. + + Since a request context typically also manages an application + context it would also be called when you pop a request context. + + When a teardown function was called because of an unhandled exception + it will be passed an error object. If an :meth:`errorhandler` is + registered, it will handle the exception and the teardown will not + receive it. + + The return values of teardown functions are ignored. + + .. versionadded:: 0.9 + """ + self.teardown_appcontext_funcs.append(f) + return f + + @setupmethod + def shell_context_processor(self, f: t.Callable) -> t.Callable: + """Registers a shell context processor function. + + .. versionadded:: 0.11 + """ + self.shell_context_processors.append(f) + return f + + def _find_error_handler( + self, e: Exception + ) -> t.Optional["ErrorHandlerCallable[Exception]"]: + """Return a registered error handler for an exception in this order: + blueprint handler for a specific code, app handler for a specific code, + blueprint handler for an exception class, app handler for an exception + class, or ``None`` if a suitable handler is not found. 
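# A minimal sketch of the lookup order described above (handler names are
# illustrative): a handler registered for a specific status code is preferred,
# and exception classes are matched along their MRO.
#
#     @app.errorhandler(404)
#     def not_found(e):
#         return "not found", 404
#
#     @app.errorhandler(KeyError)
#     def bad_lookup(e):
#         return "missing key", 500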
+ """ + exc_class, code = self._get_exc_class_and_code(type(e)) + names = (*request.blueprints, None) + + for c in (code, None) if code is not None else (None,): + for name in names: + handler_map = self.error_handler_spec[name][c] + + if not handler_map: + continue + + for cls in exc_class.__mro__: + handler = handler_map.get(cls) + + if handler is not None: + return handler + return None + + def handle_http_exception( + self, e: HTTPException + ) -> t.Union[HTTPException, ResponseReturnValue]: + """Handles an HTTP exception. By default this will invoke the + registered error handlers and fall back to returning the + exception as response. + + .. versionchanged:: 1.0.3 + ``RoutingException``, used internally for actions such as + slash redirects during routing, is not passed to error + handlers. + + .. versionchanged:: 1.0 + Exceptions are looked up by code *and* by MRO, so + ``HTTPException`` subclasses can be handled with a catch-all + handler for the base ``HTTPException``. + + .. versionadded:: 0.3 + """ + # Proxy exceptions don't have error codes. We want to always return + # those unchanged as errors + if e.code is None: + return e + + # RoutingExceptions are used internally to trigger routing + # actions, such as slash redirects raising RequestRedirect. They + # are not raised or handled in user code. + if isinstance(e, RoutingException): + return e + + handler = self._find_error_handler(e) + if handler is None: + return e + return self.ensure_sync(handler)(e) + + def trap_http_exception(self, e: Exception) -> bool: + """Checks if an HTTP exception should be trapped or not. By default + this will return ``False`` for all exceptions except for a bad request + key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``. It + also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``. + + This is called for all HTTP exceptions raised by a view function. + If it returns ``True`` for any exception the error handler for this + exception is not called and it shows up as regular exception in the + traceback. This is helpful for debugging implicitly raised HTTP + exceptions. + + .. versionchanged:: 1.0 + Bad request errors are not trapped by default in debug mode. + + .. versionadded:: 0.8 + """ + if self.config["TRAP_HTTP_EXCEPTIONS"]: + return True + + trap_bad_request = self.config["TRAP_BAD_REQUEST_ERRORS"] + + # if unset, trap key errors in debug mode + if ( + trap_bad_request is None + and self.debug + and isinstance(e, BadRequestKeyError) + ): + return True + + if trap_bad_request: + return isinstance(e, BadRequest) + + return False + + def handle_user_exception( + self, e: Exception + ) -> t.Union[HTTPException, ResponseReturnValue]: + """This method is called whenever an exception occurs that + should be handled. A special case is :class:`~werkzeug + .exceptions.HTTPException` which is forwarded to the + :meth:`handle_http_exception` method. This function will either + return a response value or reraise the exception with the same + traceback. + + .. versionchanged:: 1.0 + Key errors raised from request data like ``form`` show the + bad key in debug mode rather than a generic bad request + message. + + .. 
versionadded:: 0.7 + """ + if isinstance(e, BadRequestKeyError) and ( + self.debug or self.config["TRAP_BAD_REQUEST_ERRORS"] + ): + e.show_exception = True + + if isinstance(e, HTTPException) and not self.trap_http_exception(e): + return self.handle_http_exception(e) + + handler = self._find_error_handler(e) + + if handler is None: + raise + + return self.ensure_sync(handler)(e) + + def handle_exception(self, e: Exception) -> Response: + """Handle an exception that did not have an error handler + associated with it, or that was raised from an error handler. + This always causes a 500 ``InternalServerError``. + + Always sends the :data:`got_request_exception` signal. + + If :attr:`propagate_exceptions` is ``True``, such as in debug + mode, the error will be re-raised so that the debugger can + display it. Otherwise, the original exception is logged, and + an :exc:`~werkzeug.exceptions.InternalServerError` is returned. + + If an error handler is registered for ``InternalServerError`` or + ``500``, it will be used. For consistency, the handler will + always receive the ``InternalServerError``. The original + unhandled exception is available as ``e.original_exception``. + + .. versionchanged:: 1.1.0 + Always passes the ``InternalServerError`` instance to the + handler, setting ``original_exception`` to the unhandled + error. + + .. versionchanged:: 1.1.0 + ``after_request`` functions and other finalization is done + even for the default 500 response when there is no handler. + + .. versionadded:: 0.3 + """ + exc_info = sys.exc_info() + got_request_exception.send(self, exception=e) + + if self.propagate_exceptions: + # Re-raise if called with an active exception, otherwise + # raise the passed in exception. + if exc_info[1] is e: + raise + + raise e + + self.log_exception(exc_info) + server_error: t.Union[InternalServerError, ResponseReturnValue] + server_error = InternalServerError(original_exception=e) + handler = self._find_error_handler(server_error) + + if handler is not None: + server_error = self.ensure_sync(handler)(server_error) + + return self.finalize_request(server_error, from_error_handler=True) + + def log_exception( + self, + exc_info: t.Union[ + t.Tuple[type, BaseException, TracebackType], t.Tuple[None, None, None] + ], + ) -> None: + """Logs an exception. This is called by :meth:`handle_exception` + if debugging is disabled and right before the handler is called. + The default implementation logs the exception as error on the + :attr:`logger`. + + .. versionadded:: 0.8 + """ + self.logger.error( + f"Exception on {request.path} [{request.method}]", exc_info=exc_info + ) + + def raise_routing_exception(self, request: Request) -> "te.NoReturn": + """Exceptions that are recording during routing are reraised with + this method. During debug we are not reraising redirect requests + for non ``GET``, ``HEAD``, or ``OPTIONS`` requests and we're raising + a different error instead to help debug situations. + + :internal: + """ + if ( + not self.debug + or not isinstance(request.routing_exception, RequestRedirect) + or request.method in ("GET", "HEAD", "OPTIONS") + ): + raise request.routing_exception # type: ignore + + from .debughelpers import FormDataRoutingRedirect + + raise FormDataRoutingRedirect(request) + + def dispatch_request(self) -> ResponseReturnValue: + """Does the request dispatching. Matches the URL and returns the + return value of the view or error handler. This does not have to + be a response object. 
In order to convert the return value to a + proper response object, call :func:`make_response`. + + .. versionchanged:: 0.7 + This no longer does the exception handling, this code was + moved to the new :meth:`full_dispatch_request`. + """ + req = _request_ctx_stack.top.request + if req.routing_exception is not None: + self.raise_routing_exception(req) + rule = req.url_rule + # if we provide automatic options for this URL and the + # request came with the OPTIONS method, reply automatically + if ( + getattr(rule, "provide_automatic_options", False) + and req.method == "OPTIONS" + ): + return self.make_default_options_response() + # otherwise dispatch to the handler for that endpoint + return self.ensure_sync(self.view_functions[rule.endpoint])(**req.view_args) + + def full_dispatch_request(self) -> Response: + """Dispatches the request and on top of that performs request + pre and postprocessing as well as HTTP exception catching and + error handling. + + .. versionadded:: 0.7 + """ + self.try_trigger_before_first_request_functions() + try: + request_started.send(self) + rv = self.preprocess_request() + if rv is None: + rv = self.dispatch_request() + except Exception as e: + rv = self.handle_user_exception(e) + return self.finalize_request(rv) + + def finalize_request( + self, + rv: t.Union[ResponseReturnValue, HTTPException], + from_error_handler: bool = False, + ) -> Response: + """Given the return value from a view function this finalizes + the request by converting it into a response and invoking the + postprocessing functions. This is invoked for both normal + request dispatching as well as error handlers. + + Because this means that it might be called as a result of a + failure a special safe mode is available which can be enabled + with the `from_error_handler` flag. If enabled, failures in + response processing will be logged and otherwise ignored. + + :internal: + """ + response = self.make_response(rv) + try: + response = self.process_response(response) + request_finished.send(self, response=response) + except Exception: + if not from_error_handler: + raise + self.logger.exception( + "Request finalizing failed with an error while handling an error" + ) + return response + + def try_trigger_before_first_request_functions(self) -> None: + """Called before each request and will ensure that it triggers + the :attr:`before_first_request_funcs` and only exactly once per + application instance (which means process usually). + + :internal: + """ + if self._got_first_request: + return + with self._before_request_lock: + if self._got_first_request: + return + for func in self.before_first_request_funcs: + self.ensure_sync(func)() + self._got_first_request = True + + def make_default_options_response(self) -> Response: + """This method is called to create the default ``OPTIONS`` response. + This can be changed through subclassing to change the default + behavior of ``OPTIONS`` responses. + + .. versionadded:: 0.7 + """ + adapter = _request_ctx_stack.top.url_adapter + methods = adapter.allowed_methods() + rv = self.response_class() + rv.allow.update(methods) + return rv + + def should_ignore_error(self, error: t.Optional[BaseException]) -> bool: + """This is called to figure out if an error should be ignored + or not as far as the teardown system is concerned. If this + function returns ``True`` then the teardown handlers will not be + passed the error. + + .. 
versionadded:: 0.10 + """ + return False + + def ensure_sync(self, func: t.Callable) -> t.Callable: + """Ensure that the function is synchronous for WSGI workers. + Plain ``def`` functions are returned as-is. ``async def`` + functions are wrapped to run and wait for the response. + + Override this method to change how the app runs async views. + + .. versionadded:: 2.0 + """ + if iscoroutinefunction(func): + return self.async_to_sync(func) + + return func + + def async_to_sync( + self, func: t.Callable[..., t.Coroutine] + ) -> t.Callable[..., t.Any]: + """Return a sync function that will run the coroutine function. + + .. code-block:: python + + result = app.async_to_sync(func)(*args, **kwargs) + + Override this method to change how the app converts async code + to be synchronously callable. + + .. versionadded:: 2.0 + """ + try: + from asgiref.sync import async_to_sync as asgiref_async_to_sync + except ImportError: + raise RuntimeError( + "Install Flask with the 'async' extra in order to use async views." + ) from None + + # Check that Werkzeug isn't using its fallback ContextVar class. + if ContextVar.__module__ == "werkzeug.local": + raise RuntimeError( + "Async cannot be used with this combination of Python " + "and Greenlet versions." + ) + + return asgiref_async_to_sync(func) + + def make_response(self, rv: ResponseReturnValue) -> Response: + """Convert the return value from a view function to an instance of + :attr:`response_class`. + + :param rv: the return value from the view function. The view function + must return a response. Returning ``None``, or the view ending + without returning, is not allowed. The following types are allowed + for ``view_rv``: + + ``str`` + A response object is created with the string encoded to UTF-8 + as the body. + + ``bytes`` + A response object is created with the bytes as the body. + + ``dict`` + A dictionary that will be jsonify'd before being returned. + + ``tuple`` + Either ``(body, status, headers)``, ``(body, status)``, or + ``(body, headers)``, where ``body`` is any of the other types + allowed here, ``status`` is a string or an integer, and + ``headers`` is a dictionary or a list of ``(key, value)`` + tuples. If ``body`` is a :attr:`response_class` instance, + ``status`` overwrites the exiting value and ``headers`` are + extended. + + :attr:`response_class` + The object is returned unchanged. + + other :class:`~werkzeug.wrappers.Response` class + The object is coerced to :attr:`response_class`. + + :func:`callable` + The function is called as a WSGI application. The result is + used to create a response object. + + .. versionchanged:: 0.9 + Previously a tuple was interpreted as the arguments for the + response object. + """ + + status = headers = None + + # unpack tuple returns + if isinstance(rv, tuple): + len_rv = len(rv) + + # a 3-tuple is unpacked directly + if len_rv == 3: + rv, status, headers = rv + # decide if a 2-tuple has status or headers + elif len_rv == 2: + if isinstance(rv[1], (Headers, dict, tuple, list)): + rv, headers = rv + else: + rv, status = rv + # other sized tuples are not allowed + else: + raise TypeError( + "The view function did not return a valid response tuple." + " The tuple must have the form (body, status, headers)," + " (body, status), or (body, headers)." + ) + + # the body must not be None + if rv is None: + raise TypeError( + f"The view function for {request.endpoint!r} did not" + " return a valid response. The function either returned" + " None or ended without a return statement." 
+ ) + + # make sure the body is an instance of the response class + if not isinstance(rv, self.response_class): + if isinstance(rv, (str, bytes, bytearray)): + # let the response class set the status and headers instead of + # waiting to do it manually, so that the class can handle any + # special logic + rv = self.response_class(rv, status=status, headers=headers) + status = headers = None + elif isinstance(rv, dict): + rv = jsonify(rv) + elif isinstance(rv, BaseResponse) or callable(rv): + # evaluate a WSGI callable, or coerce a different response + # class to the correct type + try: + rv = self.response_class.force_type(rv, request.environ) # type: ignore # noqa: B950 + except TypeError as e: + raise TypeError( + f"{e}\nThe view function did not return a valid" + " response. The return type must be a string," + " dict, tuple, Response instance, or WSGI" + f" callable, but it was a {type(rv).__name__}." + ).with_traceback(sys.exc_info()[2]) from None + else: + raise TypeError( + "The view function did not return a valid" + " response. The return type must be a string," + " dict, tuple, Response instance, or WSGI" + f" callable, but it was a {type(rv).__name__}." + ) + + rv = t.cast(Response, rv) + # prefer the status if it was provided + if status is not None: + if isinstance(status, (str, bytes, bytearray)): + rv.status = status # type: ignore + else: + rv.status_code = status + + # extend existing headers with provided headers + if headers: + rv.headers.update(headers) + + return rv + + def create_url_adapter( + self, request: t.Optional[Request] + ) -> t.Optional[MapAdapter]: + """Creates a URL adapter for the given request. The URL adapter + is created at a point where the request context is not yet set + up so the request is passed explicitly. + + .. versionadded:: 0.6 + + .. versionchanged:: 0.9 + This can now also be called without a request object when the + URL adapter is created for the application context. + + .. versionchanged:: 1.0 + :data:`SERVER_NAME` no longer implicitly enables subdomain + matching. Use :attr:`subdomain_matching` instead. + """ + if request is not None: + # If subdomain matching is disabled (the default), use the + # default subdomain in all cases. This should be the default + # in Werkzeug but it currently does not have that feature. + if not self.subdomain_matching: + subdomain = self.url_map.default_subdomain or None + else: + subdomain = None + + return self.url_map.bind_to_environ( + request.environ, + server_name=self.config["SERVER_NAME"], + subdomain=subdomain, + ) + # We need at the very least the server name to be set for this + # to work. + if self.config["SERVER_NAME"] is not None: + return self.url_map.bind( + self.config["SERVER_NAME"], + script_name=self.config["APPLICATION_ROOT"], + url_scheme=self.config["PREFERRED_URL_SCHEME"], + ) + + return None + + def inject_url_defaults(self, endpoint: str, values: dict) -> None: + """Injects the URL defaults for the given endpoint directly into + the values dictionary passed. This is used internally and + automatically called on URL building. + + .. versionadded:: 0.7 + """ + names: t.Iterable[t.Optional[str]] = (None,) + + # url_for may be called outside a request context, parse the + # passed endpoint instead of using request.blueprints. + if "." 
in endpoint: + names = chain( + names, reversed(_split_blueprint_path(endpoint.rpartition(".")[0])) + ) + + for name in names: + if name in self.url_default_functions: + for func in self.url_default_functions[name]: + func(endpoint, values) + + def handle_url_build_error( + self, error: Exception, endpoint: str, values: dict + ) -> str: + """Handle :class:`~werkzeug.routing.BuildError` on + :meth:`url_for`. + """ + for handler in self.url_build_error_handlers: + try: + rv = handler(error, endpoint, values) + except BuildError as e: + # make error available outside except block + error = e + else: + if rv is not None: + return rv + + # Re-raise if called with an active exception, otherwise raise + # the passed in exception. + if error is sys.exc_info()[1]: + raise + + raise error + + def preprocess_request(self) -> t.Optional[ResponseReturnValue]: + """Called before the request is dispatched. Calls + :attr:`url_value_preprocessors` registered with the app and the + current blueprint (if any). Then calls :attr:`before_request_funcs` + registered with the app and the blueprint. + + If any :meth:`before_request` handler returns a non-None value, the + value is handled as if it was the return value from the view, and + further request handling is stopped. + """ + names = (None, *reversed(request.blueprints)) + + for name in names: + if name in self.url_value_preprocessors: + for url_func in self.url_value_preprocessors[name]: + url_func(request.endpoint, request.view_args) + + for name in names: + if name in self.before_request_funcs: + for before_func in self.before_request_funcs[name]: + rv = self.ensure_sync(before_func)() + + if rv is not None: + return rv + + return None + + def process_response(self, response: Response) -> Response: + """Can be overridden in order to modify the response object + before it's sent to the WSGI server. By default this will + call all the :meth:`after_request` decorated functions. + + .. versionchanged:: 0.5 + As of Flask 0.5 the functions registered for after request + execution are called in reverse order of registration. + + :param response: a :attr:`response_class` object. + :return: a new response object or the same, has to be an + instance of :attr:`response_class`. + """ + ctx = _request_ctx_stack.top + + for func in ctx._after_request_functions: + response = self.ensure_sync(func)(response) + + for name in chain(request.blueprints, (None,)): + if name in self.after_request_funcs: + for func in reversed(self.after_request_funcs[name]): + response = self.ensure_sync(func)(response) + + if not self.session_interface.is_null_session(ctx.session): + self.session_interface.save_session(self, ctx.session, response) + + return response + + def do_teardown_request( + self, exc: t.Optional[BaseException] = _sentinel # type: ignore + ) -> None: + """Called after the request is dispatched and the response is + returned, right before the request context is popped. + + This calls all functions decorated with + :meth:`teardown_request`, and :meth:`Blueprint.teardown_request` + if a blueprint handled the request. Finally, the + :data:`request_tearing_down` signal is sent. + + This is called by + :meth:`RequestContext.pop() <flask.ctx.RequestContext.pop>`, + which may be delayed during testing to maintain access to + resources. + + :param exc: An unhandled exception raised while dispatching the + request. Detected from the current exception information if + not passed. Passed to each teardown function. + + .. versionchanged:: 0.9 + Added the ``exc`` argument.
+ """ + if exc is _sentinel: + exc = sys.exc_info()[1] + + for name in chain(request.blueprints, (None,)): + if name in self.teardown_request_funcs: + for func in reversed(self.teardown_request_funcs[name]): + self.ensure_sync(func)(exc) + + request_tearing_down.send(self, exc=exc) + + def do_teardown_appcontext( + self, exc: t.Optional[BaseException] = _sentinel # type: ignore + ) -> None: + """Called right before the application context is popped. + + When handling a request, the application context is popped + after the request context. See :meth:`do_teardown_request`. + + This calls all functions decorated with + :meth:`teardown_appcontext`. Then the + :data:`appcontext_tearing_down` signal is sent. + + This is called by + :meth:`AppContext.pop() <flask.ctx.AppContext.pop>`. + + .. versionadded:: 0.9 + """ + if exc is _sentinel: + exc = sys.exc_info()[1] + + for func in reversed(self.teardown_appcontext_funcs): + self.ensure_sync(func)(exc) + + appcontext_tearing_down.send(self, exc=exc) + + def app_context(self) -> AppContext: + """Create an :class:`~flask.ctx.AppContext`. Use as a ``with`` + block to push the context, which will make :data:`current_app` + point at this application. + + An application context is automatically pushed by + :meth:`RequestContext.push() <flask.ctx.RequestContext.push>` + when handling a request, and when running a CLI command. Use + this to manually create a context outside of these situations. + + :: + + with app.app_context(): + init_db() + + See :doc:`/appcontext`. + + .. versionadded:: 0.9 + """ + return AppContext(self) + + def request_context(self, environ: dict) -> RequestContext: + """Create a :class:`~flask.ctx.RequestContext` representing a + WSGI environment. Use a ``with`` block to push the context, + which will make :data:`request` point at this request. + + See :doc:`/reqcontext`. + + Typically you should not call this from your own code. A request + context is automatically pushed by the :meth:`wsgi_app` when + handling a request. Use :meth:`test_request_context` to create + an environment and context instead of this method. + + :param environ: a WSGI environment + """ + return RequestContext(self, environ) + + def test_request_context(self, *args: t.Any, **kwargs: t.Any) -> RequestContext: + """Create a :class:`~flask.ctx.RequestContext` for a WSGI + environment created from the given values. This is mostly useful + during testing, where you may want to run a function that uses + request data without dispatching a full request. + + See :doc:`/reqcontext`. + + Use a ``with`` block to push the context, which will make + :data:`request` point at the request for the created + environment. :: + + with test_request_context(...): + generate_report() + + When using the shell, it may be easier to push and pop the + context manually to avoid indentation. :: + + ctx = app.test_request_context(...) + ctx.push() + ... + ctx.pop() + + Takes the same arguments as Werkzeug's + :class:`~werkzeug.test.EnvironBuilder`, with some defaults from + the application. See the linked Werkzeug docs for most of the + available arguments. Flask-specific behavior is listed here. + + :param path: URL path being requested. + :param base_url: Base URL where the app is being served, which + ``path`` is relative to. If not given, built from + :data:`PREFERRED_URL_SCHEME`, ``subdomain``, + :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. + :param subdomain: Subdomain name to append to + :data:`SERVER_NAME`. + :param url_scheme: Scheme to use instead of + :data:`PREFERRED_URL_SCHEME`.
+ :param data: The request body, either as a string or a dict of + form keys and values. + :param json: If given, this is serialized as JSON and passed as + ``data``. Also defaults ``content_type`` to + ``application/json``. + :param args: other positional arguments passed to + :class:`~werkzeug.test.EnvironBuilder`. + :param kwargs: other keyword arguments passed to + :class:`~werkzeug.test.EnvironBuilder`. + """ + from .testing import EnvironBuilder + + builder = EnvironBuilder(self, *args, **kwargs) + + try: + return self.request_context(builder.get_environ()) + finally: + builder.close() + + def wsgi_app(self, environ: dict, start_response: t.Callable) -> t.Any: + """The actual WSGI application. This is not implemented in + :meth:`__call__` so that middlewares can be applied without + losing a reference to the app object. Instead of doing this:: + + app = MyMiddleware(app) + + It's a better idea to do this instead:: + + app.wsgi_app = MyMiddleware(app.wsgi_app) + + Then you still have the original application object around and + can continue to call methods on it. + + .. versionchanged:: 0.7 + Teardown events for the request and app contexts are called + even if an unhandled error occurs. Other events may not be + called depending on when an error occurs during dispatch. + See :ref:`callbacks-and-errors`. + + :param environ: A WSGI environment. + :param start_response: A callable accepting a status code, + a list of headers, and an optional exception context to + start the response. + """ + ctx = self.request_context(environ) + error: t.Optional[BaseException] = None + try: + try: + ctx.push() + response = self.full_dispatch_request() + except Exception as e: + error = e + response = self.handle_exception(e) + except: # noqa: B001 + error = sys.exc_info()[1] + raise + return response(environ, start_response) + finally: + if self.should_ignore_error(error): + error = None + ctx.auto_pop(error) + + def __call__(self, environ: dict, start_response: t.Callable) -> t.Any: + """The WSGI server calls the Flask application object as the + WSGI application. This calls :meth:`wsgi_app`, which can be + wrapped to apply middleware. + """ + return self.wsgi_app(environ, start_response) diff --git a/venv/lib/python3.8/site-packages/flask/blueprints.py b/venv/lib/python3.8/site-packages/flask/blueprints.py new file mode 100644 index 0000000..5c23a73 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/blueprints.py @@ -0,0 +1,609 @@ +import os +import typing as t +from collections import defaultdict +from functools import update_wrapper + +from .scaffold import _endpoint_from_view_func +from .scaffold import _sentinel +from .scaffold import Scaffold +from .typing import AfterRequestCallable +from .typing import BeforeFirstRequestCallable +from .typing import BeforeRequestCallable +from .typing import TeardownCallable +from .typing import TemplateContextProcessorCallable +from .typing import TemplateFilterCallable +from .typing import TemplateGlobalCallable +from .typing import TemplateTestCallable +from .typing import URLDefaultCallable +from .typing import URLValuePreprocessorCallable + +if t.TYPE_CHECKING: + from .app import Flask + from .typing import ErrorHandlerCallable + +DeferredSetupFunction = t.Callable[["BlueprintSetupState"], t.Callable] + + +class BlueprintSetupState: + """Temporary holder object for registering a blueprint with the + application. 
An instance of this class is created by the + :meth:`~flask.Blueprint.make_setup_state` method and later passed + to all register callback functions. + """ + + def __init__( + self, + blueprint: "Blueprint", + app: "Flask", + options: t.Any, + first_registration: bool, + ) -> None: + #: a reference to the current application + self.app = app + + #: a reference to the blueprint that created this setup state. + self.blueprint = blueprint + + #: a dictionary with all options that were passed to the + #: :meth:`~flask.Flask.register_blueprint` method. + self.options = options + + #: as blueprints can be registered multiple times with the + #: application and not everything wants to be registered + #: multiple times on it, this attribute can be used to figure + #: out if the blueprint was registered in the past already. + self.first_registration = first_registration + + subdomain = self.options.get("subdomain") + if subdomain is None: + subdomain = self.blueprint.subdomain + + #: The subdomain that the blueprint should be active for, ``None`` + #: otherwise. + self.subdomain = subdomain + + url_prefix = self.options.get("url_prefix") + if url_prefix is None: + url_prefix = self.blueprint.url_prefix + #: The prefix that should be used for all URLs defined on the + #: blueprint. + self.url_prefix = url_prefix + + self.name = self.options.get("name", blueprint.name) + self.name_prefix = self.options.get("name_prefix", "") + + #: A dictionary with URL defaults that is added to each and every + #: URL that was defined with the blueprint. + self.url_defaults = dict(self.blueprint.url_values_defaults) + self.url_defaults.update(self.options.get("url_defaults", ())) + + def add_url_rule( + self, + rule: str, + endpoint: t.Optional[str] = None, + view_func: t.Optional[t.Callable] = None, + **options: t.Any, + ) -> None: + """A helper method to register a rule (and optionally a view function) + to the application. The endpoint is automatically prefixed with the + blueprint's name. + """ + if self.url_prefix is not None: + if rule: + rule = "/".join((self.url_prefix.rstrip("/"), rule.lstrip("/"))) + else: + rule = self.url_prefix + options.setdefault("subdomain", self.subdomain) + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) # type: ignore + defaults = self.url_defaults + if "defaults" in options: + defaults = dict(defaults, **options.pop("defaults")) + + self.app.add_url_rule( + rule, + f"{self.name_prefix}.{self.name}.{endpoint}".lstrip("."), + view_func, + defaults=defaults, + **options, + ) + + +class Blueprint(Scaffold): + """Represents a blueprint, a collection of routes and other + app-related functions that can be registered on a real application + later. + + A blueprint is an object that allows defining application functions + without requiring an application object ahead of time. It uses the + same decorators as :class:`~flask.Flask`, but defers the need for an + application by recording them for later registration. + + Decorating a function with a blueprint creates a deferred function + that is called with :class:`~flask.blueprints.BlueprintSetupState` + when the blueprint is registered on an application. + + See :doc:`/blueprints` for more information. + + :param name: The name of the blueprint. Will be prepended to each + endpoint name. + :param import_name: The name of the blueprint package, usually + ``__name__``. This helps locate the ``root_path`` for the + blueprint. 
+ :param static_folder: A folder with static files that should be + served by the blueprint's static route. The path is relative to + the blueprint's root path. Blueprint static files are disabled + by default. + :param static_url_path: The url to serve static files from. + Defaults to ``static_folder``. If the blueprint does not have + a ``url_prefix``, the app's static route will take precedence, + and the blueprint's static files won't be accessible. + :param template_folder: A folder with templates that should be added + to the app's template search path. The path is relative to the + blueprint's root path. Blueprint templates are disabled by + default. Blueprint templates have a lower precedence than those + in the app's templates folder. + :param url_prefix: A path to prepend to all of the blueprint's URLs, + to make them distinct from the rest of the app's routes. + :param subdomain: A subdomain that blueprint routes will match on by + default. + :param url_defaults: A dict of default values that blueprint routes + will receive by default. + :param root_path: By default, the blueprint will automatically set + this based on ``import_name``. In certain situations this + automatic detection can fail, so the path can be specified + manually instead. + + .. versionchanged:: 1.1.0 + Blueprints have a ``cli`` group to register nested CLI commands. + The ``cli_group`` parameter controls the name of the group under + the ``flask`` command. + + .. versionadded:: 0.7 + """ + + warn_on_modifications = False + _got_registered_once = False + + #: Blueprint local JSON encoder class to use. Set to ``None`` to use + #: the app's :class:`~flask.Flask.json_encoder`. + json_encoder = None + #: Blueprint local JSON decoder class to use. Set to ``None`` to use + #: the app's :class:`~flask.Flask.json_decoder`. + json_decoder = None + + def __init__( + self, + name: str, + import_name: str, + static_folder: t.Optional[t.Union[str, os.PathLike]] = None, + static_url_path: t.Optional[str] = None, + template_folder: t.Optional[str] = None, + url_prefix: t.Optional[str] = None, + subdomain: t.Optional[str] = None, + url_defaults: t.Optional[dict] = None, + root_path: t.Optional[str] = None, + cli_group: t.Optional[str] = _sentinel, # type: ignore + ): + super().__init__( + import_name=import_name, + static_folder=static_folder, + static_url_path=static_url_path, + template_folder=template_folder, + root_path=root_path, + ) + + if "." in name: + raise ValueError("'name' may not contain a dot '.' character.") + + self.name = name + self.url_prefix = url_prefix + self.subdomain = subdomain + self.deferred_functions: t.List[DeferredSetupFunction] = [] + + if url_defaults is None: + url_defaults = {} + + self.url_values_defaults = url_defaults + self.cli_group = cli_group + self._blueprints: t.List[t.Tuple["Blueprint", dict]] = [] + + def _is_setup_finished(self) -> bool: + return self.warn_on_modifications and self._got_registered_once + + def record(self, func: t.Callable) -> None: + """Registers a function that is called when the blueprint is + registered on the application. This function is called with the + state as argument as returned by the :meth:`make_setup_state` + method. + """ + if self._got_registered_once and self.warn_on_modifications: + from warnings import warn + + warn( + Warning( + "The blueprint was already registered once but is" + " getting modified now. These changes will not show" + " up." 
+ ) + ) + self.deferred_functions.append(func) + + def record_once(self, func: t.Callable) -> None: + """Works like :meth:`record` but wraps the function in another + function that will ensure the function is only called once. If the + blueprint is registered a second time on the application, the + function passed is not called. + """ + + def wrapper(state: BlueprintSetupState) -> None: + if state.first_registration: + func(state) + + return self.record(update_wrapper(wrapper, func)) + + def make_setup_state( + self, app: "Flask", options: dict, first_registration: bool = False + ) -> BlueprintSetupState: + """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` + object that is later passed to the register callback functions. + Subclasses can override this to return a subclass of the setup state. + """ + return BlueprintSetupState(self, app, options, first_registration) + + def register_blueprint(self, blueprint: "Blueprint", **options: t.Any) -> None: + """Register a :class:`~flask.Blueprint` on this blueprint. Keyword + arguments passed to this method will override the defaults set + on the blueprint. + + .. versionchanged:: 2.0.1 + The ``name`` option can be used to change the (pre-dotted) + name the blueprint is registered with. This allows the same + blueprint to be registered multiple times with unique names + for ``url_for``. + + .. versionadded:: 2.0 + """ + if blueprint is self: + raise ValueError("Cannot register a blueprint on itself") + self._blueprints.append((blueprint, options)) + + def register(self, app: "Flask", options: dict) -> None: + """Called by :meth:`Flask.register_blueprint` to register all + views and callbacks registered on the blueprint with the + application. Creates a :class:`.BlueprintSetupState` and calls + each :meth:`record` callback with it. + + :param app: The application this blueprint is being registered + with. + :param options: Keyword arguments forwarded from + :meth:`~Flask.register_blueprint`. + + .. versionchanged:: 2.0.1 + Nested blueprints are registered with their dotted name. + This allows different blueprints with the same name to be + nested at different locations. + + .. versionchanged:: 2.0.1 + The ``name`` option can be used to change the (pre-dotted) + name the blueprint is registered with. This allows the same + blueprint to be registered multiple times with unique names + for ``url_for``. + + .. versionchanged:: 2.0.1 + Registering the same blueprint with the same name multiple + times is deprecated and will become an error in Flask 2.1. + """ + name_prefix = options.get("name_prefix", "") + self_name = options.get("name", self.name) + name = f"{name_prefix}.{self_name}".lstrip(".") + + if name in app.blueprints: + existing_at = f" '{name}'" if self_name != name else "" + + if app.blueprints[name] is not self: + raise ValueError( + f"The name '{self_name}' is already registered for" + f" a different blueprint{existing_at}. Use 'name='" + " to provide a unique name." + ) + else: + import warnings + + warnings.warn( + f"The name '{self_name}' is already registered for" + f" this blueprint{existing_at}. Use 'name=' to" + " provide a unique name. 
This will become an error" + " in Flask 2.1.", + stacklevel=4, + ) + + first_bp_registration = not any(bp is self for bp in app.blueprints.values()) + first_name_registration = name not in app.blueprints + + app.blueprints[name] = self + self._got_registered_once = True + state = self.make_setup_state(app, options, first_bp_registration) + + if self.has_static_folder: + state.add_url_rule( + f"{self.static_url_path}/<path:filename>", + view_func=self.send_static_file, + endpoint="static", + ) + + # Merge blueprint data into parent. + if first_bp_registration or first_name_registration: + + def extend(bp_dict, parent_dict): + for key, values in bp_dict.items(): + key = name if key is None else f"{name}.{key}" + parent_dict[key].extend(values) + + for key, value in self.error_handler_spec.items(): + key = name if key is None else f"{name}.{key}" + value = defaultdict( + dict, + { + code: { + exc_class: func for exc_class, func in code_values.items() + } + for code, code_values in value.items() + }, + ) + app.error_handler_spec[key] = value + + for endpoint, func in self.view_functions.items(): + app.view_functions[endpoint] = func + + extend(self.before_request_funcs, app.before_request_funcs) + extend(self.after_request_funcs, app.after_request_funcs) + extend( + self.teardown_request_funcs, + app.teardown_request_funcs, + ) + extend(self.url_default_functions, app.url_default_functions) + extend(self.url_value_preprocessors, app.url_value_preprocessors) + extend(self.template_context_processors, app.template_context_processors) + + for deferred in self.deferred_functions: + deferred(state) + + cli_resolved_group = options.get("cli_group", self.cli_group) + + if self.cli.commands: + if cli_resolved_group is None: + app.cli.commands.update(self.cli.commands) + elif cli_resolved_group is _sentinel: + self.cli.name = name + app.cli.add_command(self.cli) + else: + self.cli.name = cli_resolved_group + app.cli.add_command(self.cli) + + for blueprint, bp_options in self._blueprints: + bp_options = bp_options.copy() + bp_url_prefix = bp_options.get("url_prefix") + + if bp_url_prefix is None: + bp_url_prefix = blueprint.url_prefix + + if state.url_prefix is not None and bp_url_prefix is not None: + bp_options["url_prefix"] = ( + state.url_prefix.rstrip("/") + "/" + bp_url_prefix.lstrip("/") + ) + elif bp_url_prefix is not None: + bp_options["url_prefix"] = bp_url_prefix + elif state.url_prefix is not None: + bp_options["url_prefix"] = state.url_prefix + + bp_options["name_prefix"] = name + blueprint.register(app, bp_options) + + def add_url_rule( + self, + rule: str, + endpoint: t.Optional[str] = None, + view_func: t.Optional[t.Callable] = None, + provide_automatic_options: t.Optional[bool] = None, + **options: t.Any, + ) -> None: + """Like :meth:`Flask.add_url_rule` but for a blueprint. The endpoint for + the :func:`url_for` function is prefixed with the name of the blueprint. + """ + if endpoint and "." in endpoint: + raise ValueError("'endpoint' may not contain a dot '.' character.") + + if view_func and hasattr(view_func, "__name__") and "." in view_func.__name__: + raise ValueError("'view_func' name may not contain a dot '.' character.") + + self.record( + lambda s: s.add_url_rule( + rule, + endpoint, + view_func, + provide_automatic_options=provide_automatic_options, + **options, + ) + ) + + def app_template_filter( + self, name: t.Optional[str] = None + ) -> t.Callable[[TemplateFilterCallable], TemplateFilterCallable]: + """Register a custom template filter, available application wide.
Like + :meth:`Flask.template_filter` but for a blueprint. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + + def decorator(f: TemplateFilterCallable) -> TemplateFilterCallable: + self.add_app_template_filter(f, name=name) + return f + + return decorator + + def add_app_template_filter( + self, f: TemplateFilterCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template filter, available application wide. Like + :meth:`Flask.add_template_filter` but for a blueprint. Works exactly + like the :meth:`app_template_filter` decorator. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + + def register_template(state: BlueprintSetupState) -> None: + state.app.jinja_env.filters[name or f.__name__] = f + + self.record_once(register_template) + + def app_template_test( + self, name: t.Optional[str] = None + ) -> t.Callable[[TemplateTestCallable], TemplateTestCallable]: + """Register a custom template test, available application wide. Like + :meth:`Flask.template_test` but for a blueprint. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def decorator(f: TemplateTestCallable) -> TemplateTestCallable: + self.add_app_template_test(f, name=name) + return f + + return decorator + + def add_app_template_test( + self, f: TemplateTestCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template test, available application wide. Like + :meth:`Flask.add_template_test` but for a blueprint. Works exactly + like the :meth:`app_template_test` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def register_template(state: BlueprintSetupState) -> None: + state.app.jinja_env.tests[name or f.__name__] = f + + self.record_once(register_template) + + def app_template_global( + self, name: t.Optional[str] = None + ) -> t.Callable[[TemplateGlobalCallable], TemplateGlobalCallable]: + """Register a custom template global, available application wide. Like + :meth:`Flask.template_global` but for a blueprint. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + + def decorator(f: TemplateGlobalCallable) -> TemplateGlobalCallable: + self.add_app_template_global(f, name=name) + return f + + return decorator + + def add_app_template_global( + self, f: TemplateGlobalCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template global, available application wide. Like + :meth:`Flask.add_template_global` but for a blueprint. Works exactly + like the :meth:`app_template_global` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + + def register_template(state: BlueprintSetupState) -> None: + state.app.jinja_env.globals[name or f.__name__] = f + + self.record_once(register_template) + + def before_app_request(self, f: BeforeRequestCallable) -> BeforeRequestCallable: + """Like :meth:`Flask.before_request`. Such a function is executed + before each request, even if outside of a blueprint. 
+ """ + self.record_once( + lambda s: s.app.before_request_funcs.setdefault(None, []).append(f) + ) + return f + + def before_app_first_request( + self, f: BeforeFirstRequestCallable + ) -> BeforeFirstRequestCallable: + """Like :meth:`Flask.before_first_request`. Such a function is + executed before the first request to the application. + """ + self.record_once(lambda s: s.app.before_first_request_funcs.append(f)) + return f + + def after_app_request(self, f: AfterRequestCallable) -> AfterRequestCallable: + """Like :meth:`Flask.after_request` but for a blueprint. Such a function + is executed after each request, even if outside of the blueprint. + """ + self.record_once( + lambda s: s.app.after_request_funcs.setdefault(None, []).append(f) + ) + return f + + def teardown_app_request(self, f: TeardownCallable) -> TeardownCallable: + """Like :meth:`Flask.teardown_request` but for a blueprint. Such a + function is executed when tearing down each request, even if outside of + the blueprint. + """ + self.record_once( + lambda s: s.app.teardown_request_funcs.setdefault(None, []).append(f) + ) + return f + + def app_context_processor( + self, f: TemplateContextProcessorCallable + ) -> TemplateContextProcessorCallable: + """Like :meth:`Flask.context_processor` but for a blueprint. Such a + function is executed each request, even if outside of the blueprint. + """ + self.record_once( + lambda s: s.app.template_context_processors.setdefault(None, []).append(f) + ) + return f + + def app_errorhandler(self, code: t.Union[t.Type[Exception], int]) -> t.Callable: + """Like :meth:`Flask.errorhandler` but for a blueprint. This + handler is used for all requests, even if outside of the blueprint. + """ + + def decorator( + f: "ErrorHandlerCallable[Exception]", + ) -> "ErrorHandlerCallable[Exception]": + self.record_once(lambda s: s.app.errorhandler(code)(f)) + return f + + return decorator + + def app_url_value_preprocessor( + self, f: URLValuePreprocessorCallable + ) -> URLValuePreprocessorCallable: + """Same as :meth:`url_value_preprocessor` but application wide.""" + self.record_once( + lambda s: s.app.url_value_preprocessors.setdefault(None, []).append(f) + ) + return f + + def app_url_defaults(self, f: URLDefaultCallable) -> URLDefaultCallable: + """Same as :meth:`url_defaults` but application wide.""" + self.record_once( + lambda s: s.app.url_default_functions.setdefault(None, []).append(f) + ) + return f diff --git a/venv/lib/python3.8/site-packages/flask/cli.py b/venv/lib/python3.8/site-packages/flask/cli.py new file mode 100644 index 0000000..7ab4fa1 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/cli.py @@ -0,0 +1,998 @@ +import ast +import inspect +import os +import platform +import re +import sys +import traceback +import warnings +from functools import update_wrapper +from operator import attrgetter +from threading import Lock +from threading import Thread + +import click +from werkzeug.utils import import_string + +from .globals import current_app +from .helpers import get_debug_flag +from .helpers import get_env +from .helpers import get_load_dotenv + +try: + import dotenv +except ImportError: + dotenv = None + +try: + import ssl +except ImportError: + ssl = None # type: ignore + + +class NoAppException(click.UsageError): + """Raised if an application cannot be found or loaded.""" + + +def find_best_app(script_info, module): + """Given a module instance this tries to find the best possible + application in the module or raises an exception. + """ + from . 
import Flask + + # Search for the most common names first. + for attr_name in ("app", "application"): + app = getattr(module, attr_name, None) + + if isinstance(app, Flask): + return app + + # Otherwise find the only object that is a Flask instance. + matches = [v for v in module.__dict__.values() if isinstance(v, Flask)] + + if len(matches) == 1: + return matches[0] + elif len(matches) > 1: + raise NoAppException( + "Detected multiple Flask applications in module" + f" {module.__name__!r}. Use 'FLASK_APP={module.__name__}:name'" + f" to specify the correct one." + ) + + # Search for app factory functions. + for attr_name in ("create_app", "make_app"): + app_factory = getattr(module, attr_name, None) + + if inspect.isfunction(app_factory): + try: + app = call_factory(script_info, app_factory) + + if isinstance(app, Flask): + return app + except TypeError as e: + if not _called_with_wrong_args(app_factory): + raise + + raise NoAppException( + f"Detected factory {attr_name!r} in module {module.__name__!r}," + " but could not call it without arguments. Use" + f" \"FLASK_APP='{module.__name__}:{attr_name}(args)'\"" + " to specify arguments." + ) from e + + raise NoAppException( + "Failed to find Flask application or factory in module" + f" {module.__name__!r}. Use 'FLASK_APP={module.__name__}:name'" + " to specify one." + ) + + +def call_factory(script_info, app_factory, args=None, kwargs=None): + """Takes an app factory, a ``script_info`` object and optionally a tuple + of arguments. Checks for the existence of a script_info argument and calls + the app_factory depending on that and the arguments provided. + """ + sig = inspect.signature(app_factory) + args = [] if args is None else args + kwargs = {} if kwargs is None else kwargs + + if "script_info" in sig.parameters: + warnings.warn( + "The 'script_info' argument is deprecated and will not be" + " passed to the app factory function in Flask 2.1.", + DeprecationWarning, + ) + kwargs["script_info"] = script_info + + if not args and len(sig.parameters) == 1: + first_parameter = next(iter(sig.parameters.values())) + + if ( + first_parameter.default is inspect.Parameter.empty + # **kwargs is reported as an empty default, ignore it + and first_parameter.kind is not inspect.Parameter.VAR_KEYWORD + ): + warnings.warn( + "Script info is deprecated and will not be passed as the" + " single argument to the app factory function in Flask" + " 2.1.", + DeprecationWarning, + ) + args.append(script_info) + + return app_factory(*args, **kwargs) + + +def _called_with_wrong_args(f): + """Check whether calling a function raised a ``TypeError`` because + the call failed or because something in the factory raised the + error. + + :param f: The function that was called. + :return: ``True`` if the call failed. + """ + tb = sys.exc_info()[2] + + try: + while tb is not None: + if tb.tb_frame.f_code is f.__code__: + # In the function, it was called successfully. + return False + + tb = tb.tb_next + + # Didn't reach the function. + return True + finally: + # Delete tb to break a circular reference. + # https://docs.python.org/2/library/sys.html#sys.exc_info + del tb + + +def find_app_by_string(script_info, module, app_name): + """Check if the given string is a variable name or a function. Call + a function to get the app instance, or return the variable directly. + """ + from . import Flask + + # Parse app_name as a single expression to determine if it's a valid + # attribute name or function call.
+ try: + expr = ast.parse(app_name.strip(), mode="eval").body + except SyntaxError: + raise NoAppException( + f"Failed to parse {app_name!r} as an attribute name or function call." + ) from None + + if isinstance(expr, ast.Name): + name = expr.id + args = kwargs = None + elif isinstance(expr, ast.Call): + # Ensure the function name is an attribute name only. + if not isinstance(expr.func, ast.Name): + raise NoAppException( + f"Function reference must be a simple name: {app_name!r}." + ) + + name = expr.func.id + + # Parse the positional and keyword arguments as literals. + try: + args = [ast.literal_eval(arg) for arg in expr.args] + kwargs = {kw.arg: ast.literal_eval(kw.value) for kw in expr.keywords} + except ValueError: + # literal_eval gives cryptic error messages, show a generic + # message with the full expression instead. + raise NoAppException( + f"Failed to parse arguments as literal values: {app_name!r}." + ) from None + else: + raise NoAppException( + f"Failed to parse {app_name!r} as an attribute name or function call." + ) + + try: + attr = getattr(module, name) + except AttributeError as e: + raise NoAppException( + f"Failed to find attribute {name!r} in {module.__name__!r}." + ) from e + + # If the attribute is a function, call it with any args and kwargs + # to get the real application. + if inspect.isfunction(attr): + try: + app = call_factory(script_info, attr, args, kwargs) + except TypeError as e: + if not _called_with_wrong_args(attr): + raise + + raise NoAppException( + f"The factory {app_name!r} in module" + f" {module.__name__!r} could not be called with the" + " specified arguments." + ) from e + else: + app = attr + + if isinstance(app, Flask): + return app + + raise NoAppException( + "A valid Flask application was not obtained from" + f" '{module.__name__}:{app_name}'." + ) + + +def prepare_import(path): + """Given a filename this will try to calculate the python path, add it + to the search path and return the actual module name that is expected. + """ + path = os.path.realpath(path) + + fname, ext = os.path.splitext(path) + if ext == ".py": + path = fname + + if os.path.basename(path) == "__init__": + path = os.path.dirname(path) + + module_name = [] + + # move up until outside package structure (no __init__.py) + while True: + path, name = os.path.split(path) + module_name.append(name) + + if not os.path.exists(os.path.join(path, "__init__.py")): + break + + if sys.path[0] != path: + sys.path.insert(0, path) + + return ".".join(module_name[::-1]) + + +def locate_app(script_info, module_name, app_name, raise_if_not_found=True): + __traceback_hide__ = True # noqa: F841 + + try: + __import__(module_name) + except ImportError as e: + # Reraise the ImportError if it occurred within the imported module. + # Determine this by checking whether the trace has a depth > 1. + if sys.exc_info()[2].tb_next: + raise NoAppException( + f"While importing {module_name!r}, an ImportError was raised." + ) from e + elif raise_if_not_found: + raise NoAppException(f"Could not import {module_name!r}.") from e + else: + return + + module = sys.modules[module_name] + + if app_name is None: + return find_best_app(script_info, module) + else: + return find_app_by_string(script_info, module, app_name) + + +def get_version(ctx, param, value): + if not value or ctx.resilient_parsing: + return + + import werkzeug + from . 
import __version__ + + click.echo( + f"Python {platform.python_version()}\n" + f"Flask {__version__}\n" + f"Werkzeug {werkzeug.__version__}", + color=ctx.color, + ) + ctx.exit() + + +version_option = click.Option( + ["--version"], + help="Show the flask version", + expose_value=False, + callback=get_version, + is_flag=True, + is_eager=True, +) + + +class DispatchingApp: + """Special application that dispatches to a Flask application which + is imported by name in a background thread. If an error happens + it is recorded and shown as part of the WSGI handling which in case + of the Werkzeug debugger means that it shows up in the browser. + """ + + def __init__(self, loader, use_eager_loading=None): + self.loader = loader + self._app = None + self._lock = Lock() + self._bg_loading_exc = None + + if use_eager_loading is None: + use_eager_loading = os.environ.get("WERKZEUG_RUN_MAIN") != "true" + + if use_eager_loading: + self._load_unlocked() + else: + self._load_in_background() + + def _load_in_background(self): + def _load_app(): + __traceback_hide__ = True # noqa: F841 + with self._lock: + try: + self._load_unlocked() + except Exception as e: + self._bg_loading_exc = e + + t = Thread(target=_load_app, args=()) + t.start() + + def _flush_bg_loading_exception(self): + __traceback_hide__ = True # noqa: F841 + exc = self._bg_loading_exc + + if exc is not None: + self._bg_loading_exc = None + raise exc + + def _load_unlocked(self): + __traceback_hide__ = True # noqa: F841 + self._app = rv = self.loader() + self._bg_loading_exc = None + return rv + + def __call__(self, environ, start_response): + __traceback_hide__ = True # noqa: F841 + if self._app is not None: + return self._app(environ, start_response) + self._flush_bg_loading_exception() + with self._lock: + if self._app is not None: + rv = self._app + else: + rv = self._load_unlocked() + return rv(environ, start_response) + + +class ScriptInfo: + """Helper object to deal with Flask applications. This is usually not + necessary to interface with as it's used internally in the dispatching + to click. In future versions of Flask this object will most likely play + a bigger role. Typically it's created automatically by the + :class:`FlaskGroup` but you can also manually create it and pass it + onwards as click object. + """ + + def __init__(self, app_import_path=None, create_app=None, set_debug_flag=True): + #: Optionally the import path for the Flask application. + self.app_import_path = app_import_path or os.environ.get("FLASK_APP") + #: Optionally a function that is passed the script info to create + #: the instance of the application. + self.create_app = create_app + #: A dictionary with arbitrary data that can be associated with + #: this script info. + self.data = {} + self.set_debug_flag = set_debug_flag + self._loaded_app = None + + def load_app(self): + """Loads the Flask app (if not yet loaded) and returns it. Calling + this multiple times will just result in the already loaded app to + be returned. 
+ """ + __traceback_hide__ = True # noqa: F841 + + if self._loaded_app is not None: + return self._loaded_app + + if self.create_app is not None: + app = call_factory(self, self.create_app) + else: + if self.app_import_path: + path, name = ( + re.split(r":(?![\\/])", self.app_import_path, 1) + [None] + )[:2] + import_name = prepare_import(path) + app = locate_app(self, import_name, name) + else: + for path in ("wsgi.py", "app.py"): + import_name = prepare_import(path) + app = locate_app(self, import_name, None, raise_if_not_found=False) + + if app: + break + + if not app: + raise NoAppException( + "Could not locate a Flask application. You did not provide " + 'the "FLASK_APP" environment variable, and a "wsgi.py" or ' + '"app.py" module was not found in the current directory.' + ) + + if self.set_debug_flag: + # Update the app's debug flag through the descriptor so that + # other values repopulate as well. + app.debug = get_debug_flag() + + self._loaded_app = app + return app + + +pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True) + + +def with_appcontext(f): + """Wraps a callback so that it's guaranteed to be executed with the + script's application context. If callbacks are registered directly + to the ``app.cli`` object then they are wrapped with this function + by default unless it's disabled. + """ + + @click.pass_context + def decorator(__ctx, *args, **kwargs): + with __ctx.ensure_object(ScriptInfo).load_app().app_context(): + return __ctx.invoke(f, *args, **kwargs) + + return update_wrapper(decorator, f) + + +class AppGroup(click.Group): + """This works similar to a regular click :class:`~click.Group` but it + changes the behavior of the :meth:`command` decorator so that it + automatically wraps the functions in :func:`with_appcontext`. + + Not to be confused with :class:`FlaskGroup`. + """ + + def command(self, *args, **kwargs): + """This works exactly like the method of the same name on a regular + :class:`click.Group` but it wraps callbacks in :func:`with_appcontext` + unless it's disabled by passing ``with_appcontext=False``. + """ + wrap_for_ctx = kwargs.pop("with_appcontext", True) + + def decorator(f): + if wrap_for_ctx: + f = with_appcontext(f) + return click.Group.command(self, *args, **kwargs)(f) + + return decorator + + def group(self, *args, **kwargs): + """This works exactly like the method of the same name on a regular + :class:`click.Group` but it defaults the group class to + :class:`AppGroup`. + """ + kwargs.setdefault("cls", AppGroup) + return click.Group.group(self, *args, **kwargs) + + +class FlaskGroup(AppGroup): + """Special subclass of the :class:`AppGroup` group that supports + loading more commands from the configured Flask app. Normally a + developer does not have to interface with this class but there are + some very advanced use cases for which it makes sense to create an + instance of this. see :ref:`custom-scripts`. + + :param add_default_commands: if this is True then the default run and + shell commands will be added. + :param add_version_option: adds the ``--version`` option. + :param create_app: an optional callback that is passed the script info and + returns the loaded app. + :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` + files to set environment variables. Will also change the working + directory to the directory containing the first file found. + :param set_debug_flag: Set the app's debug flag based on the active + environment + + .. 
versionchanged:: 1.0 + If installed, python-dotenv will be used to load environment variables + from :file:`.env` and :file:`.flaskenv` files. + """ + + def __init__( + self, + add_default_commands=True, + create_app=None, + add_version_option=True, + load_dotenv=True, + set_debug_flag=True, + **extra, + ): + params = list(extra.pop("params", None) or ()) + + if add_version_option: + params.append(version_option) + + AppGroup.__init__(self, params=params, **extra) + self.create_app = create_app + self.load_dotenv = load_dotenv + self.set_debug_flag = set_debug_flag + + if add_default_commands: + self.add_command(run_command) + self.add_command(shell_command) + self.add_command(routes_command) + + self._loaded_plugin_commands = False + + def _load_plugin_commands(self): + if self._loaded_plugin_commands: + return + try: + import pkg_resources + except ImportError: + self._loaded_plugin_commands = True + return + + for ep in pkg_resources.iter_entry_points("flask.commands"): + self.add_command(ep.load(), ep.name) + self._loaded_plugin_commands = True + + def get_command(self, ctx, name): + self._load_plugin_commands() + # Look up built-in and plugin commands, which should be + # available even if the app fails to load. + rv = super().get_command(ctx, name) + + if rv is not None: + return rv + + info = ctx.ensure_object(ScriptInfo) + + # Look up commands provided by the app, showing an error and + # continuing if the app couldn't be loaded. + try: + return info.load_app().cli.get_command(ctx, name) + except NoAppException as e: + click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") + + def list_commands(self, ctx): + self._load_plugin_commands() + # Start with the built-in and plugin commands. + rv = set(super().list_commands(ctx)) + info = ctx.ensure_object(ScriptInfo) + + # Add commands provided by the app, showing an error and + # continuing if the app couldn't be loaded. + try: + rv.update(info.load_app().cli.list_commands(ctx)) + except NoAppException as e: + # When an app couldn't be loaded, show the error message + # without the traceback. + click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") + except Exception: + # When any other errors occurred during loading, show the + # full traceback. + click.secho(f"{traceback.format_exc()}\n", err=True, fg="red") + + return sorted(rv) + + def main(self, *args, **kwargs): + # Set a global flag that indicates that we were invoked from the + # command line interface. This is detected by Flask.run to make the + # call into a no-op. This is necessary to avoid ugly errors when the + # script that is loaded here also attempts to start a server. + os.environ["FLASK_RUN_FROM_CLI"] = "true" + + if get_load_dotenv(self.load_dotenv): + load_dotenv() + + obj = kwargs.get("obj") + + if obj is None: + obj = ScriptInfo( + create_app=self.create_app, set_debug_flag=self.set_debug_flag + ) + + kwargs["obj"] = obj + kwargs.setdefault("auto_envvar_prefix", "FLASK") + return super().main(*args, **kwargs) + + +def _path_is_ancestor(path, other): + """Take ``other`` and remove the length of ``path`` from it. Then join it + to ``path``. If it is the original value, ``path`` is an ancestor of + ``other``.""" + return os.path.join(path, other[len(path) :].lstrip(os.sep)) == other + + +def load_dotenv(path=None): + """Load "dotenv" files in order of precedence to set environment variables. + + If an env var is already set it is not overwritten, so earlier files in the + list are preferred over later files. 
+ + This is a no-op if `python-dotenv`_ is not installed. + + .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme + + :param path: Load the file at this location instead of searching. + :return: ``True`` if a file was loaded. + + .. versionchanged:: 1.1.0 + Returns ``False`` when python-dotenv is not installed, or when + the given path isn't a file. + + .. versionchanged:: 2.0 + When loading the env files, set the default encoding to UTF-8. + + .. versionadded:: 1.0 + """ + if dotenv is None: + if path or os.path.isfile(".env") or os.path.isfile(".flaskenv"): + click.secho( + " * Tip: There are .env or .flaskenv files present." + ' Do "pip install python-dotenv" to use them.', + fg="yellow", + err=True, + ) + + return False + + # if the given path specifies the actual file then return True, + # else False + if path is not None: + if os.path.isfile(path): + return dotenv.load_dotenv(path, encoding="utf-8") + + return False + + new_dir = None + + for name in (".env", ".flaskenv"): + path = dotenv.find_dotenv(name, usecwd=True) + + if not path: + continue + + if new_dir is None: + new_dir = os.path.dirname(path) + + dotenv.load_dotenv(path, encoding="utf-8") + + return new_dir is not None # at least one file was located and loaded + + +def show_server_banner(env, debug, app_import_path, eager_loading): + """Show extra startup messages the first time the server is run, + ignoring the reloader. + """ + if os.environ.get("WERKZEUG_RUN_MAIN") == "true": + return + + if app_import_path is not None: + message = f" * Serving Flask app {app_import_path!r}" + + if not eager_loading: + message += " (lazy loading)" + + click.echo(message) + + click.echo(f" * Environment: {env}") + + if env == "production": + click.secho( + " WARNING: This is a development server. Do not use it in" + " a production deployment.", + fg="red", + ) + click.secho(" Use a production WSGI server instead.", dim=True) + + if debug is not None: + click.echo(f" * Debug mode: {'on' if debug else 'off'}") + + +class CertParamType(click.ParamType): + """Click option type for the ``--cert`` option. Allows either an + existing file, the string ``'adhoc'``, or an import for a + :class:`~ssl.SSLContext` object. + """ + + name = "path" + + def __init__(self): + self.path_type = click.Path(exists=True, dir_okay=False, resolve_path=True) + + def convert(self, value, param, ctx): + if ssl is None: + raise click.BadParameter( + 'Using "--cert" requires Python to be compiled with SSL support.', + ctx, + param, + ) + + try: + return self.path_type(value, param, ctx) + except click.BadParameter: + value = click.STRING(value, param, ctx).lower() + + if value == "adhoc": + try: + import cryptography # noqa: F401 + except ImportError: + raise click.BadParameter( + "Using ad-hoc certificates requires the cryptography library.", + ctx, + param, + ) from None + + return value + + obj = import_string(value, silent=True) + + if isinstance(obj, ssl.SSLContext): + return obj + + raise + + +def _validate_key(ctx, param, value): + """The ``--key`` option must be specified when ``--cert`` is a file. + Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed. 
+ """ + cert = ctx.params.get("cert") + is_adhoc = cert == "adhoc" + is_context = ssl and isinstance(cert, ssl.SSLContext) + + if value is not None: + if is_adhoc: + raise click.BadParameter( + 'When "--cert" is "adhoc", "--key" is not used.', ctx, param + ) + + if is_context: + raise click.BadParameter( + 'When "--cert" is an SSLContext object, "--key" is not used.', ctx, param + ) + + if not cert: + raise click.BadParameter('"--cert" must also be specified.', ctx, param) + + ctx.params["cert"] = cert, value + + else: + if cert and not (is_adhoc or is_context): + raise click.BadParameter('Required when using "--cert".', ctx, param) + + return value + + +class SeparatedPathType(click.Path): + """Click option type that accepts a list of values separated by the + OS's path separator (``:``, ``;`` on Windows). Each value is + validated as a :class:`click.Path` type. + """ + + def convert(self, value, param, ctx): + items = self.split_envvar_value(value) + super_convert = super().convert + return [super_convert(item, param, ctx) for item in items] + + +@click.command("run", short_help="Run a development server.") +@click.option("--host", "-h", default="127.0.0.1", help="The interface to bind to.") +@click.option("--port", "-p", default=5000, help="The port to bind to.") +@click.option( + "--cert", type=CertParamType(), help="Specify a certificate file to use HTTPS." +) +@click.option( + "--key", + type=click.Path(exists=True, dir_okay=False, resolve_path=True), + callback=_validate_key, + expose_value=False, + help="The key file to use when specifying a certificate.", +) +@click.option( + "--reload/--no-reload", + default=None, + help="Enable or disable the reloader. By default the reloader " + "is active if debug is enabled.", +) +@click.option( + "--debugger/--no-debugger", + default=None, + help="Enable or disable the debugger. By default the debugger " + "is active if debug is enabled.", +) +@click.option( + "--eager-loading/--lazy-loading", + default=None, + help="Enable or disable eager loading. By default eager " + "loading is enabled if the reloader is disabled.", +) +@click.option( + "--with-threads/--without-threads", + default=True, + help="Enable or disable multithreading.", +) +@click.option( + "--extra-files", + default=None, + type=SeparatedPathType(), + help=( + "Extra files that trigger a reload on change. Multiple paths" + f" are separated by {os.path.pathsep!r}." + ), +) +@pass_script_info +def run_command( + info, host, port, reload, debugger, eager_loading, with_threads, cert, extra_files +): + """Run a local development server. + + This server is for development purposes only. It does not provide + the stability, security, or performance of production WSGI servers. + + The reloader and debugger are enabled by default if + FLASK_ENV=development or FLASK_DEBUG=1. + """ + debug = get_debug_flag() + + if reload is None: + reload = debug + + if debugger is None: + debugger = debug + + show_server_banner(get_env(), debug, info.app_import_path, eager_loading) + app = DispatchingApp(info.load_app, use_eager_loading=eager_loading) + + from werkzeug.serving import run_simple + + run_simple( + host, + port, + app, + use_reloader=reload, + use_debugger=debugger, + threaded=with_threads, + ssl_context=cert, + extra_files=extra_files, + ) + + +@click.command("shell", short_help="Run a shell in the app context.") +@with_appcontext +def shell_command() -> None: + """Run an interactive Python shell in the context of a given + Flask application.
The application will populate the default + namespace of this shell according to its configuration. + + This is useful for executing small snippets of management code + without having to manually configure the application. + """ + import code + from .globals import _app_ctx_stack + + app = _app_ctx_stack.top.app + banner = ( + f"Python {sys.version} on {sys.platform}\n" + f"App: {app.import_name} [{app.env}]\n" + f"Instance: {app.instance_path}" + ) + ctx: dict = {} + + # Support the regular Python interpreter startup script if someone + # is using it. + startup = os.environ.get("PYTHONSTARTUP") + if startup and os.path.isfile(startup): + with open(startup) as f: + eval(compile(f.read(), startup, "exec"), ctx) + + ctx.update(app.make_shell_context()) + + # Site, customize, or startup script can set a hook to call when + # entering interactive mode. The default one sets up readline with + # tab and history completion. + interactive_hook = getattr(sys, "__interactivehook__", None) + + if interactive_hook is not None: + try: + import readline + from rlcompleter import Completer + except ImportError: + pass + else: + # rlcompleter uses __main__.__dict__ by default, which is + # flask.__main__. Use the shell context instead. + readline.set_completer(Completer(ctx).complete) + + interactive_hook() + + code.interact(banner=banner, local=ctx) + + +@click.command("routes", short_help="Show the routes for the app.") +@click.option( + "--sort", + "-s", + type=click.Choice(("endpoint", "methods", "rule", "match")), + default="endpoint", + help=( + 'Method to sort routes by. "match" is the order that Flask will match ' + "routes when dispatching a request." + ), +) +@click.option("--all-methods", is_flag=True, help="Show HEAD and OPTIONS methods.") +@with_appcontext +def routes_command(sort: str, all_methods: bool) -> None: + """Show all registered routes with endpoints and methods.""" + + rules = list(current_app.url_map.iter_rules()) + if not rules: + click.echo("No routes were registered.") + return + + ignored_methods = set(() if all_methods else ("HEAD", "OPTIONS")) + + if sort in ("endpoint", "rule"): + rules = sorted(rules, key=attrgetter(sort)) + elif sort == "methods": + rules = sorted(rules, key=lambda rule: sorted(rule.methods)) # type: ignore + + rule_methods = [ + ", ".join(sorted(rule.methods - ignored_methods)) # type: ignore + for rule in rules + ] + + headers = ("Endpoint", "Methods", "Rule") + widths = ( + max(len(rule.endpoint) for rule in rules), + max(len(methods) for methods in rule_methods), + max(len(rule.rule) for rule in rules), + ) + widths = [max(len(h), w) for h, w in zip(headers, widths)] + row = "{{0:<{0}}} {{1:<{1}}} {{2:<{2}}}".format(*widths) + + click.echo(row.format(*headers).strip()) + click.echo(row.format(*("-" * width for width in widths))) + + for rule, methods in zip(rules, rule_methods): + click.echo(row.format(rule.endpoint, methods, rule.rule).rstrip()) + + +cli = FlaskGroup( + help="""\ +A general utility script for Flask applications. + +Provides commands from Flask, extensions, and the application. Loads the +application defined in the FLASK_APP environment variable, or from a wsgi.py +file. Setting the FLASK_ENV environment variable to 'development' will enable +debug mode. 
+ +\b + {prefix}{cmd} FLASK_APP=hello.py + {prefix}{cmd} FLASK_ENV=development + {prefix}flask run +""".format( + cmd="export" if os.name == "posix" else "set", + prefix="$ " if os.name == "posix" else "> ", + ) +) + + +def main() -> None: + if int(click.__version__[0]) < 8: + warnings.warn( + "Using the `flask` cli with Click 7 is deprecated and" + " will not be supported starting with Flask 2.1." + " Please upgrade to Click 8 as soon as possible.", + DeprecationWarning, + ) + # TODO omit sys.argv once https://github.com/pallets/click/issues/536 is fixed + cli.main(args=sys.argv[1:]) + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.8/site-packages/flask/config.py b/venv/lib/python3.8/site-packages/flask/config.py new file mode 100644 index 0000000..ca76902 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/config.py @@ -0,0 +1,295 @@ +import errno +import os +import types +import typing as t + +from werkzeug.utils import import_string + + +class ConfigAttribute: + """Makes an attribute forward to the config""" + + def __init__(self, name: str, get_converter: t.Optional[t.Callable] = None) -> None: + self.__name__ = name + self.get_converter = get_converter + + def __get__(self, obj: t.Any, owner: t.Any = None) -> t.Any: + if obj is None: + return self + rv = obj.config[self.__name__] + if self.get_converter is not None: + rv = self.get_converter(rv) + return rv + + def __set__(self, obj: t.Any, value: t.Any) -> None: + obj.config[self.__name__] = value + + +class Config(dict): + """Works exactly like a dict but provides ways to fill it from files + or special dictionaries. There are two common patterns to populate the + config. + + Either you can fill the config from a config file:: + + app.config.from_pyfile('yourconfig.cfg') + + Or alternatively you can define the configuration options in the + module that calls :meth:`from_object` or provide an import path to + a module that should be loaded. It is also possible to tell it to + use the same module and with that provide the configuration values + just before the call:: + + DEBUG = True + SECRET_KEY = 'development key' + app.config.from_object(__name__) + + In both cases (loading from any Python file or loading from modules), + only uppercase keys are added to the config. This makes it possible to use + lowercase values in the config file for temporary values that are not added + to the config or to define the config keys in the same file that implements + the application. + + Probably the most interesting way to load configurations is from an + environment variable pointing to a file:: + + app.config.from_envvar('YOURAPPLICATION_SETTINGS') + + In this case before launching the application you have to set this + environment variable to the file you want to use. On Linux and OS X + use the export statement:: + + export YOURAPPLICATION_SETTINGS='/path/to/config/file' + + On windows use `set` instead. + + :param root_path: path to which files are read relative from. When the + config object is created by the application, this is + the application's :attr:`~flask.Flask.root_path`. + :param defaults: an optional dictionary of default values + """ + + def __init__(self, root_path: str, defaults: t.Optional[dict] = None) -> None: + dict.__init__(self, defaults or {}) + self.root_path = root_path + + def from_envvar(self, variable_name: str, silent: bool = False) -> bool: + """Loads a configuration from an environment variable pointing to + a configuration file. 
This is basically just a shortcut with nicer + error messages for this line of code:: + + app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) + + :param variable_name: name of the environment variable + :param silent: set to ``True`` if you want silent failure for missing + files. + :return: ``True`` if the file was loaded successfully. + """ + rv = os.environ.get(variable_name) + if not rv: + if silent: + return False + raise RuntimeError( + f"The environment variable {variable_name!r} is not set" + " and as such configuration could not be loaded. Set" + " this variable and make it point to a configuration" + " file" + ) + return self.from_pyfile(rv, silent=silent) + + def from_pyfile(self, filename: str, silent: bool = False) -> bool: + """Updates the values in the config from a Python file. This function + behaves as if the file was imported as module with the + :meth:`from_object` function. + + :param filename: the filename of the config. This can either be an + absolute filename or a filename relative to the + root path. + :param silent: set to ``True`` if you want silent failure for missing + files. + :return: ``True`` if the file was loaded successfully. + + .. versionadded:: 0.7 + `silent` parameter. + """ + filename = os.path.join(self.root_path, filename) + d = types.ModuleType("config") + d.__file__ = filename + try: + with open(filename, mode="rb") as config_file: + exec(compile(config_file.read(), filename, "exec"), d.__dict__) + except OSError as e: + if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR): + return False + e.strerror = f"Unable to load configuration file ({e.strerror})" + raise + self.from_object(d) + return True + + def from_object(self, obj: t.Union[object, str]) -> None: + """Updates the values from the given object. An object can be of one + of the following two types: + + - a string: in this case the object with that name will be imported + - an actual object reference: that object is used directly + + Objects are usually either modules or classes. :meth:`from_object` + loads only the uppercase attributes of the module/class. A ``dict`` + object will not work with :meth:`from_object` because the keys of a + ``dict`` are not attributes of the ``dict`` class. + + Example of module-based configuration:: + + app.config.from_object('yourapplication.default_config') + from yourapplication import default_config + app.config.from_object(default_config) + + Nothing is done to the object before loading. If the object is a + class and has ``@property`` attributes, it needs to be + instantiated before being passed to this method. + + You should not use this function to load the actual configuration but + rather configuration defaults. The actual config should be loaded + with :meth:`from_pyfile` and ideally from a location not within the + package because the package might be installed system wide. + + See :ref:`config-dev-prod` for an example of class-based configuration + using :meth:`from_object`. + + :param obj: an import name or object + """ + if isinstance(obj, str): + obj = import_string(obj) + for key in dir(obj): + if key.isupper(): + self[key] = getattr(obj, key) + + def from_file( + self, + filename: str, + load: t.Callable[[t.IO[t.Any]], t.Mapping], + silent: bool = False, + ) -> bool: + """Update the values in the config from a file that is loaded + using the ``load`` parameter. The loaded data is passed to the + :meth:`from_mapping` method. + + .. 
code-block:: python + + import toml + app.config.from_file("config.toml", load=toml.load) + + :param filename: The path to the data file. This can be an + absolute path or relative to the config root path. + :param load: A callable that takes a file handle and returns a + mapping of loaded data from the file. + :type load: ``Callable[[Reader], Mapping]`` where ``Reader`` + implements a ``read`` method. + :param silent: Ignore the file if it doesn't exist. + :return: ``True`` if the file was loaded successfully. + + .. versionadded:: 2.0 + """ + filename = os.path.join(self.root_path, filename) + + try: + with open(filename) as f: + obj = load(f) + except OSError as e: + if silent and e.errno in (errno.ENOENT, errno.EISDIR): + return False + + e.strerror = f"Unable to load configuration file ({e.strerror})" + raise + + return self.from_mapping(obj) + + def from_json(self, filename: str, silent: bool = False) -> bool: + """Update the values in the config from a JSON file. The loaded + data is passed to the :meth:`from_mapping` method. + + :param filename: The path to the JSON file. This can be an + absolute path or relative to the config root path. + :param silent: Ignore the file if it doesn't exist. + :return: ``True`` if the file was loaded successfully. + + .. deprecated:: 2.0.0 + Will be removed in Flask 2.1. Use :meth:`from_file` instead. + This was removed early in 2.0.0, was added back in 2.0.1. + + .. versionadded:: 0.11 + """ + import warnings + from . import json + + warnings.warn( + "'from_json' is deprecated and will be removed in Flask" + " 2.1. Use 'from_file(path, json.load)' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.from_file(filename, json.load, silent=silent) + + def from_mapping( + self, mapping: t.Optional[t.Mapping[str, t.Any]] = None, **kwargs: t.Any + ) -> bool: + """Updates the config like :meth:`update` ignoring items with non-upper + keys. + :return: Always returns ``True``. + + .. versionadded:: 0.11 + """ + mappings: t.Dict[str, t.Any] = {} + if mapping is not None: + mappings.update(mapping) + mappings.update(kwargs) + for key, value in mappings.items(): + if key.isupper(): + self[key] = value + return True + + def get_namespace( + self, namespace: str, lowercase: bool = True, trim_namespace: bool = True + ) -> t.Dict[str, t.Any]: + """Returns a dictionary containing a subset of configuration options + that match the specified namespace/prefix. Example usage:: + + app.config['IMAGE_STORE_TYPE'] = 'fs' + app.config['IMAGE_STORE_PATH'] = '/var/app/images' + app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com' + image_store_config = app.config.get_namespace('IMAGE_STORE_') + + The resulting dictionary `image_store_config` would look like:: + + { + 'type': 'fs', + 'path': '/var/app/images', + 'base_url': 'http://img.website.com' + } + + This is often useful when configuration options map directly to + keyword arguments in functions or class constructors. + + :param namespace: a configuration namespace + :param lowercase: a flag indicating if the keys of the resulting + dictionary should be lowercase + :param trim_namespace: a flag indicating if the keys of the resulting + dictionary should not include the namespace + + .. 
versionadded:: 0.11 + """ + rv = {} + for k, v in self.items(): + if not k.startswith(namespace): + continue + if trim_namespace: + key = k[len(namespace) :] + else: + key = k + if lowercase: + key = key.lower() + rv[key] = v + return rv + + def __repr__(self) -> str: + return f"<{type(self).__name__} {dict.__repr__(self)}>" diff --git a/venv/lib/python3.8/site-packages/flask/ctx.py b/venv/lib/python3.8/site-packages/flask/ctx.py new file mode 100644 index 0000000..5c06463 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/ctx.py @@ -0,0 +1,480 @@ +import sys +import typing as t +from functools import update_wrapper +from types import TracebackType + +from werkzeug.exceptions import HTTPException + +from .globals import _app_ctx_stack +from .globals import _request_ctx_stack +from .signals import appcontext_popped +from .signals import appcontext_pushed +from .typing import AfterRequestCallable + +if t.TYPE_CHECKING: + from .app import Flask + from .sessions import SessionMixin + from .wrappers import Request + + +# a singleton sentinel value for parameter defaults +_sentinel = object() + + +class _AppCtxGlobals: + """A plain object. Used as a namespace for storing data during an + application context. + + Creating an app context automatically creates this object, which is + made available as the :data:`g` proxy. + + .. describe:: 'key' in g + + Check whether an attribute is present. + + .. versionadded:: 0.10 + + .. describe:: iter(g) + + Return an iterator over the attribute names. + + .. versionadded:: 0.10 + """ + + # Define attr methods to let mypy know this is a namespace object + # that has arbitrary attributes. + + def __getattr__(self, name: str) -> t.Any: + try: + return self.__dict__[name] + except KeyError: + raise AttributeError(name) from None + + def __setattr__(self, name: str, value: t.Any) -> None: + self.__dict__[name] = value + + def __delattr__(self, name: str) -> None: + try: + del self.__dict__[name] + except KeyError: + raise AttributeError(name) from None + + def get(self, name: str, default: t.Optional[t.Any] = None) -> t.Any: + """Get an attribute by name, or a default value. Like + :meth:`dict.get`. + + :param name: Name of attribute to get. + :param default: Value to return if the attribute is not present. + + .. versionadded:: 0.10 + """ + return self.__dict__.get(name, default) + + def pop(self, name: str, default: t.Any = _sentinel) -> t.Any: + """Get and remove an attribute by name. Like :meth:`dict.pop`. + + :param name: Name of attribute to pop. + :param default: Value to return if the attribute is not present, + instead of raising a ``KeyError``. + + .. versionadded:: 0.11 + """ + if default is _sentinel: + return self.__dict__.pop(name) + else: + return self.__dict__.pop(name, default) + + def setdefault(self, name: str, default: t.Any = None) -> t.Any: + """Get the value of an attribute if it is present, otherwise + set and return a default value. Like :meth:`dict.setdefault`. + + :param name: Name of attribute to get. + :param default: Value to set and return if the attribute is not + present. + + .. 
versionadded:: 0.11 + """ + return self.__dict__.setdefault(name, default) + + def __contains__(self, item: str) -> bool: + return item in self.__dict__ + + def __iter__(self) -> t.Iterator[str]: + return iter(self.__dict__) + + def __repr__(self) -> str: + top = _app_ctx_stack.top + if top is not None: + return f"" + return object.__repr__(self) + + +def after_this_request(f: AfterRequestCallable) -> AfterRequestCallable: + """Executes a function after this request. This is useful to modify + response objects. The function is passed the response object and has + to return the same or a new one. + + Example:: + + @app.route('/') + def index(): + @after_this_request + def add_header(response): + response.headers['X-Foo'] = 'Parachute' + return response + return 'Hello World!' + + This is more useful if a function other than the view function wants to + modify a response. For instance think of a decorator that wants to add + some headers without converting the return value into a response object. + + .. versionadded:: 0.9 + """ + _request_ctx_stack.top._after_request_functions.append(f) + return f + + +def copy_current_request_context(f: t.Callable) -> t.Callable: + """A helper function that decorates a function to retain the current + request context. This is useful when working with greenlets. The moment + the function is decorated a copy of the request context is created and + then pushed when the function is called. The current session is also + included in the copied request context. + + Example:: + + import gevent + from flask import copy_current_request_context + + @app.route('/') + def index(): + @copy_current_request_context + def do_some_work(): + # do some work here, it can access flask.request or + # flask.session like you would otherwise in the view function. + ... + gevent.spawn(do_some_work) + return 'Regular response' + + .. versionadded:: 0.10 + """ + top = _request_ctx_stack.top + if top is None: + raise RuntimeError( + "This decorator can only be used at local scopes " + "when a request context is on the stack. For instance within " + "view functions." + ) + reqctx = top.copy() + + def wrapper(*args, **kwargs): + with reqctx: + return f(*args, **kwargs) + + return update_wrapper(wrapper, f) + + +def has_request_context() -> bool: + """If you have code that wants to test if a request context is there or + not this function can be used. For instance, you may want to take advantage + of request information if the request object is available, but fail + silently if it is unavailable. + + :: + + class User(db.Model): + + def __init__(self, username, remote_addr=None): + self.username = username + if remote_addr is None and has_request_context(): + remote_addr = request.remote_addr + self.remote_addr = remote_addr + + Alternatively you can also just test any of the context bound objects + (such as :class:`request` or :class:`g`) for truthness:: + + class User(db.Model): + + def __init__(self, username, remote_addr=None): + self.username = username + if remote_addr is None and request: + remote_addr = request.remote_addr + self.remote_addr = remote_addr + + .. versionadded:: 0.7 + """ + return _request_ctx_stack.top is not None + + +def has_app_context() -> bool: + """Works like :func:`has_request_context` but for the application + context. You can also just do a boolean check on the + :data:`current_app` object instead. + + .. 
versionadded:: 0.9 + """ + return _app_ctx_stack.top is not None + + +class AppContext: + """The application context binds an application object implicitly + to the current thread or greenlet, similar to how the + :class:`RequestContext` binds request information. The application + context is also implicitly created if a request context is created + but the application is not on top of the individual application + context. + """ + + def __init__(self, app: "Flask") -> None: + self.app = app + self.url_adapter = app.create_url_adapter(None) + self.g = app.app_ctx_globals_class() + + # Like request context, app contexts can be pushed multiple times + # but there a basic "refcount" is enough to track them. + self._refcnt = 0 + + def push(self) -> None: + """Binds the app context to the current context.""" + self._refcnt += 1 + _app_ctx_stack.push(self) + appcontext_pushed.send(self.app) + + def pop(self, exc: t.Optional[BaseException] = _sentinel) -> None: # type: ignore + """Pops the app context.""" + try: + self._refcnt -= 1 + if self._refcnt <= 0: + if exc is _sentinel: + exc = sys.exc_info()[1] + self.app.do_teardown_appcontext(exc) + finally: + rv = _app_ctx_stack.pop() + assert rv is self, f"Popped wrong app context. ({rv!r} instead of {self!r})" + appcontext_popped.send(self.app) + + def __enter__(self) -> "AppContext": + self.push() + return self + + def __exit__( + self, exc_type: type, exc_value: BaseException, tb: TracebackType + ) -> None: + self.pop(exc_value) + + +class RequestContext: + """The request context contains all request relevant information. It is + created at the beginning of the request and pushed to the + `_request_ctx_stack` and removed at the end of it. It will create the + URL adapter and request object for the WSGI environment provided. + + Do not attempt to use this class directly, instead use + :meth:`~flask.Flask.test_request_context` and + :meth:`~flask.Flask.request_context` to create this object. + + When the request context is popped, it will evaluate all the + functions registered on the application for teardown execution + (:meth:`~flask.Flask.teardown_request`). + + The request context is automatically popped at the end of the request + for you. In debug mode the request context is kept around if + exceptions happen so that interactive debuggers have a chance to + introspect the data. With 0.4 this can also be forced for requests + that did not fail and outside of ``DEBUG`` mode. By setting + ``'flask._preserve_context'`` to ``True`` on the WSGI environment the + context will not pop itself at the end of the request. This is used by + the :meth:`~flask.Flask.test_client` for example to implement the + deferred cleanup functionality. + + You might find this helpful for unittests where you need the + information from the context local around for a little longer. Make + sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in + that situation, otherwise your unittests will leak memory. 
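+
+ A minimal usage sketch (illustrative only; assumes ``app`` is a
+ :class:`~flask.Flask` instance defined elsewhere)::
+
+     from flask import request
+
+     with app.test_request_context("/hello", method="POST"):
+         # request and session are bound while this block runs; the
+         # context pops itself again on exit.
+         assert request.path == "/hello"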
+ """ + + def __init__( + self, + app: "Flask", + environ: dict, + request: t.Optional["Request"] = None, + session: t.Optional["SessionMixin"] = None, + ) -> None: + self.app = app + if request is None: + request = app.request_class(environ) + self.request = request + self.url_adapter = None + try: + self.url_adapter = app.create_url_adapter(self.request) + except HTTPException as e: + self.request.routing_exception = e + self.flashes = None + self.session = session + + # Request contexts can be pushed multiple times and interleaved with + # other request contexts. Now only if the last level is popped we + # get rid of them. Additionally if an application context is missing + # one is created implicitly so for each level we add this information + self._implicit_app_ctx_stack: t.List[t.Optional["AppContext"]] = [] + + # indicator if the context was preserved. Next time another context + # is pushed the preserved context is popped. + self.preserved = False + + # remembers the exception for pop if there is one in case the context + # preservation kicks in. + self._preserved_exc = None + + # Functions that should be executed after the request on the response + # object. These will be called before the regular "after_request" + # functions. + self._after_request_functions: t.List[AfterRequestCallable] = [] + + @property + def g(self) -> AppContext: + return _app_ctx_stack.top.g + + @g.setter + def g(self, value: AppContext) -> None: + _app_ctx_stack.top.g = value + + def copy(self) -> "RequestContext": + """Creates a copy of this request context with the same request object. + This can be used to move a request context to a different greenlet. + Because the actual request object is the same this cannot be used to + move a request context to a different thread unless access to the + request object is locked. + + .. versionadded:: 0.10 + + .. versionchanged:: 1.1 + The current session object is used instead of reloading the original + data. This prevents `flask.session` pointing to an out-of-date object. + """ + return self.__class__( + self.app, + environ=self.request.environ, + request=self.request, + session=self.session, + ) + + def match_request(self) -> None: + """Can be overridden by a subclass to hook into the matching + of the request. + """ + try: + result = self.url_adapter.match(return_rule=True) # type: ignore + self.request.url_rule, self.request.view_args = result # type: ignore + except HTTPException as e: + self.request.routing_exception = e + + def push(self) -> None: + """Binds the request context to the current context.""" + # If an exception occurs in debug mode or if context preservation is + # activated under exception situations exactly one context stays + # on the stack. The rationale is that you want to access that + # information under debug situations. However if someone forgets to + # pop that context again we want to make sure that on the next push + # it's invalidated, otherwise we run at risk that something leaks + # memory. This is usually only a problem in test suite since this + # functionality is not active in production environments. + top = _request_ctx_stack.top + if top is not None and top.preserved: + top.pop(top._preserved_exc) + + # Before we push the request context we have to ensure that there + # is an application context. 
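+ # If no app context is active, or the active one belongs to a different
+ # app, an implicit AppContext is pushed below and recorded in
+ # _implicit_app_ctx_stack so that pop() knows to remove it again.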
+ app_ctx = _app_ctx_stack.top + if app_ctx is None or app_ctx.app != self.app: + app_ctx = self.app.app_context() + app_ctx.push() + self._implicit_app_ctx_stack.append(app_ctx) + else: + self._implicit_app_ctx_stack.append(None) + + _request_ctx_stack.push(self) + + # Open the session at the moment that the request context is available. + # This allows a custom open_session method to use the request context. + # Only open a new session if this is the first time the request was + # pushed, otherwise stream_with_context loses the session. + if self.session is None: + session_interface = self.app.session_interface + self.session = session_interface.open_session(self.app, self.request) + + if self.session is None: + self.session = session_interface.make_null_session(self.app) + + # Match the request URL after loading the session, so that the + # session is available in custom URL converters. + if self.url_adapter is not None: + self.match_request() + + def pop(self, exc: t.Optional[BaseException] = _sentinel) -> None: # type: ignore + """Pops the request context and unbinds it by doing that. This will + also trigger the execution of functions registered by the + :meth:`~flask.Flask.teardown_request` decorator. + + .. versionchanged:: 0.9 + Added the `exc` argument. + """ + app_ctx = self._implicit_app_ctx_stack.pop() + clear_request = False + + try: + if not self._implicit_app_ctx_stack: + self.preserved = False + self._preserved_exc = None + if exc is _sentinel: + exc = sys.exc_info()[1] + self.app.do_teardown_request(exc) + + request_close = getattr(self.request, "close", None) + if request_close is not None: + request_close() + clear_request = True + finally: + rv = _request_ctx_stack.pop() + + # get rid of circular dependencies at the end of the request + # so that we don't require the GC to be active. + if clear_request: + rv.request.environ["werkzeug.request"] = None + + # Get rid of the app as well if necessary. + if app_ctx is not None: + app_ctx.pop(exc) + + assert ( + rv is self + ), f"Popped wrong request context. ({rv!r} instead of {self!r})" + + def auto_pop(self, exc: t.Optional[BaseException]) -> None: + if self.request.environ.get("flask._preserve_context") or ( + exc is not None and self.app.preserve_context_on_exception + ): + self.preserved = True + self._preserved_exc = exc # type: ignore + else: + self.pop(exc) + + def __enter__(self) -> "RequestContext": + self.push() + return self + + def __exit__( + self, exc_type: type, exc_value: BaseException, tb: TracebackType + ) -> None: + # do not pop the request stack if we are in debug mode and an + # exception happened. This will allow the debugger to still + # access the request object in the interactive shell. Furthermore + # the context can be force kept alive for the test client. + # See flask.testing for how this works. 
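+ # auto_pop() implements that: it marks the context as preserved when
+ # "flask._preserve_context" is set on the WSGI environ, or when an
+ # exception occurred and preserve_context_on_exception is enabled;
+ # otherwise it pops the context normally.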
+ self.auto_pop(exc_value) + + def __repr__(self) -> str: + return ( + f"<{type(self).__name__} {self.request.url!r}" + f" [{self.request.method}] of {self.app.name}>" + ) diff --git a/venv/lib/python3.8/site-packages/flask/debughelpers.py b/venv/lib/python3.8/site-packages/flask/debughelpers.py new file mode 100644 index 0000000..212f7d7 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/debughelpers.py @@ -0,0 +1,172 @@ +import os +import typing as t +from warnings import warn + +from .app import Flask +from .blueprints import Blueprint +from .globals import _request_ctx_stack + + +class UnexpectedUnicodeError(AssertionError, UnicodeError): + """Raised in places where we want some better error reporting for + unexpected unicode or binary data. + """ + + +class DebugFilesKeyError(KeyError, AssertionError): + """Raised from request.files during debugging. The idea is that it can + provide a better error message than just a generic KeyError/BadRequest. + """ + + def __init__(self, request, key): + form_matches = request.form.getlist(key) + buf = [ + f"You tried to access the file {key!r} in the request.files" + " dictionary but it does not exist. The mimetype for the" + f" request is {request.mimetype!r} instead of" + " 'multipart/form-data' which means that no file contents" + " were transmitted. To fix this error you should provide" + ' enctype="multipart/form-data" in your form.' + ] + if form_matches: + names = ", ".join(repr(x) for x in form_matches) + buf.append( + "\n\nThe browser instead transmitted some file names. " + f"This was submitted: {names}" + ) + self.msg = "".join(buf) + + def __str__(self): + return self.msg + + +class FormDataRoutingRedirect(AssertionError): + """This exception is raised by Flask in debug mode if it detects a + redirect caused by the routing system when the request method is not + GET, HEAD or OPTIONS. Reasoning: form data will be dropped. + """ + + def __init__(self, request): + exc = request.routing_exception + buf = [ + f"A request was sent to this URL ({request.url}) but a" + " redirect was issued automatically by the routing system" + f" to {exc.new_url!r}." + ] + + # In case just a slash was appended we can be extra helpful + if f"{request.base_url}/" == exc.new_url.split("?")[0]: + buf.append( + " The URL was defined with a trailing slash so Flask" + " will automatically redirect to the URL with the" + " trailing slash if it was accessed without one." + ) + + buf.append( + " Make sure to directly send your" + f" {request.method}-request to this URL since we can't make" + " browsers or HTTP clients redirect with form data reliably" + " or without user interaction." + ) + buf.append("\n\nNote: this exception is only raised in debug mode") + AssertionError.__init__(self, "".join(buf).encode("utf-8")) + + +def attach_enctype_error_multidict(request): + """Since Flask 0.8 we're monkeypatching the files object in case a + request is detected that does not use multipart form data but the files + object is accessed. 
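+
+ Illustrative sketch only (``app``, the route and the ``"photo"`` field are
+ hypothetical): when the field arrived as plain form data because the form
+ lacked ``enctype="multipart/form-data"``, the patched class raises
+ :class:`DebugFilesKeyError` with a helpful message instead of a bare
+ ``KeyError``::
+
+     from flask import request
+
+     @app.route("/upload", methods=["POST"])
+     def upload():
+         # Missing multipart encoding -> "photo" shows up in request.form,
+         # not request.files, and this lookup raises DebugFilesKeyError.
+         return request.files["photo"].filename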
+ """ + oldcls = request.files.__class__ + + class newcls(oldcls): + def __getitem__(self, key): + try: + return oldcls.__getitem__(self, key) + except KeyError as e: + if key not in request.form: + raise + + raise DebugFilesKeyError(request, key) from e + + newcls.__name__ = oldcls.__name__ + newcls.__module__ = oldcls.__module__ + request.files.__class__ = newcls + + +def _dump_loader_info(loader) -> t.Generator: + yield f"class: {type(loader).__module__}.{type(loader).__name__}" + for key, value in sorted(loader.__dict__.items()): + if key.startswith("_"): + continue + if isinstance(value, (tuple, list)): + if not all(isinstance(x, str) for x in value): + continue + yield f"{key}:" + for item in value: + yield f" - {item}" + continue + elif not isinstance(value, (str, int, float, bool)): + continue + yield f"{key}: {value!r}" + + +def explain_template_loading_attempts(app: Flask, template, attempts) -> None: + """This should help developers understand what failed""" + info = [f"Locating template {template!r}:"] + total_found = 0 + blueprint = None + reqctx = _request_ctx_stack.top + if reqctx is not None and reqctx.request.blueprint is not None: + blueprint = reqctx.request.blueprint + + for idx, (loader, srcobj, triple) in enumerate(attempts): + if isinstance(srcobj, Flask): + src_info = f"application {srcobj.import_name!r}" + elif isinstance(srcobj, Blueprint): + src_info = f"blueprint {srcobj.name!r} ({srcobj.import_name})" + else: + src_info = repr(srcobj) + + info.append(f"{idx + 1:5}: trying loader of {src_info}") + + for line in _dump_loader_info(loader): + info.append(f" {line}") + + if triple is None: + detail = "no match" + else: + detail = f"found ({triple[1] or ''!r})" + total_found += 1 + info.append(f" -> {detail}") + + seems_fishy = False + if total_found == 0: + info.append("Error: the template could not be found.") + seems_fishy = True + elif total_found > 1: + info.append("Warning: multiple loaders returned a match for the template.") + seems_fishy = True + + if blueprint is not None and seems_fishy: + info.append( + " The template was looked up from an endpoint that belongs" + f" to the blueprint {blueprint!r}." + ) + info.append(" Maybe you did not place a template in the right folder?") + info.append(" See https://flask.palletsprojects.com/blueprints/#templates") + + app.logger.info("\n".join(info)) + + +def explain_ignored_app_run() -> None: + if os.environ.get("WERKZEUG_RUN_MAIN") != "true": + warn( + Warning( + "Silently ignoring app.run() because the application is" + " run from the flask command line executable. Consider" + ' putting app.run() behind an if __name__ == "__main__"' + " guard to silence this warning." + ), + stacklevel=3, + ) diff --git a/venv/lib/python3.8/site-packages/flask/globals.py b/venv/lib/python3.8/site-packages/flask/globals.py new file mode 100644 index 0000000..6d91c75 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/globals.py @@ -0,0 +1,59 @@ +import typing as t +from functools import partial + +from werkzeug.local import LocalProxy +from werkzeug.local import LocalStack + +if t.TYPE_CHECKING: + from .app import Flask + from .ctx import _AppCtxGlobals + from .sessions import SessionMixin + from .wrappers import Request + +_request_ctx_err_msg = """\ +Working outside of request context. + +This typically means that you attempted to use functionality that needed +an active HTTP request. 
Consult the documentation on testing for +information about how to avoid this problem.\ +""" +_app_ctx_err_msg = """\ +Working outside of application context. + +This typically means that you attempted to use functionality that needed +to interface with the current application object in some way. To solve +this, set up an application context with app.app_context(). See the +documentation for more information.\ +""" + + +def _lookup_req_object(name): + top = _request_ctx_stack.top + if top is None: + raise RuntimeError(_request_ctx_err_msg) + return getattr(top, name) + + +def _lookup_app_object(name): + top = _app_ctx_stack.top + if top is None: + raise RuntimeError(_app_ctx_err_msg) + return getattr(top, name) + + +def _find_app(): + top = _app_ctx_stack.top + if top is None: + raise RuntimeError(_app_ctx_err_msg) + return top.app + + +# context locals +_request_ctx_stack = LocalStack() +_app_ctx_stack = LocalStack() +current_app: "Flask" = LocalProxy(_find_app) # type: ignore +request: "Request" = LocalProxy(partial(_lookup_req_object, "request")) # type: ignore +session: "SessionMixin" = LocalProxy( # type: ignore + partial(_lookup_req_object, "session") +) +g: "_AppCtxGlobals" = LocalProxy(partial(_lookup_app_object, "g")) # type: ignore diff --git a/venv/lib/python3.8/site-packages/flask/helpers.py b/venv/lib/python3.8/site-packages/flask/helpers.py new file mode 100644 index 0000000..7b8b087 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/helpers.py @@ -0,0 +1,836 @@ +import os +import pkgutil +import socket +import sys +import typing as t +import warnings +from datetime import datetime +from datetime import timedelta +from functools import lru_cache +from functools import update_wrapper +from threading import RLock + +import werkzeug.utils +from werkzeug.exceptions import NotFound +from werkzeug.routing import BuildError +from werkzeug.urls import url_quote + +from .globals import _app_ctx_stack +from .globals import _request_ctx_stack +from .globals import current_app +from .globals import request +from .globals import session +from .signals import message_flashed + +if t.TYPE_CHECKING: + from .wrappers import Response + + +def get_env() -> str: + """Get the environment the app is running in, indicated by the + :envvar:`FLASK_ENV` environment variable. The default is + ``'production'``. + """ + return os.environ.get("FLASK_ENV") or "production" + + +def get_debug_flag() -> bool: + """Get whether debug mode should be enabled for the app, indicated + by the :envvar:`FLASK_DEBUG` environment variable. The default is + ``True`` if :func:`.get_env` returns ``'development'``, or ``False`` + otherwise. + """ + val = os.environ.get("FLASK_DEBUG") + + if not val: + return get_env() == "development" + + return val.lower() not in ("0", "false", "no") + + +def get_load_dotenv(default: bool = True) -> bool: + """Get whether the user has disabled loading dotenv files by setting + :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load the + files. + + :param default: What to return if the env var isn't set. + """ + val = os.environ.get("FLASK_SKIP_DOTENV") + + if not val: + return default + + return val.lower() in ("0", "false", "no") + + +def stream_with_context( + generator_or_function: t.Union[ + t.Iterator[t.AnyStr], t.Callable[..., t.Iterator[t.AnyStr]] + ] +) -> t.Iterator[t.AnyStr]: + """Request contexts disappear when the response is started on the server. 
+ This is done for efficiency reasons and to make it less likely to encounter + memory leaks with badly written WSGI middlewares. The downside is that if + you are using streamed responses, the generator cannot access request bound + information any more. + + This function however can help you keep the context around for longer:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + @stream_with_context + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(generate()) + + Alternatively it can also be used around a specific generator:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(stream_with_context(generate())) + + .. versionadded:: 0.9 + """ + try: + gen = iter(generator_or_function) # type: ignore + except TypeError: + + def decorator(*args: t.Any, **kwargs: t.Any) -> t.Any: + gen = generator_or_function(*args, **kwargs) # type: ignore + return stream_with_context(gen) + + return update_wrapper(decorator, generator_or_function) # type: ignore + + def generator() -> t.Generator: + ctx = _request_ctx_stack.top + if ctx is None: + raise RuntimeError( + "Attempted to stream with context but " + "there was no context in the first place to keep around." + ) + with ctx: + # Dummy sentinel. Has to be inside the context block or we're + # not actually keeping the context around. + yield None + + # The try/finally is here so that if someone passes a WSGI level + # iterator in we're still running the cleanup logic. Generators + # don't need that because they are closed on their destruction + # automatically. + try: + yield from gen + finally: + if hasattr(gen, "close"): + gen.close() # type: ignore + + # The trick is to start the generator. Then the code execution runs until + # the first dummy None is yielded at which point the context was already + # pushed. This item is discarded. Then when the iteration continues the + # real generator is executed. + wrapped_g = generator() + next(wrapped_g) + return wrapped_g + + +def make_response(*args: t.Any) -> "Response": + """Sometimes it is necessary to set additional headers in a view. Because + views do not have to return response objects but can return a value that + is converted into a response object by Flask itself, it becomes tricky to + add headers to it. This function can be called instead of using a return + and you will get a response object which you can use to attach headers. + + If view looked like this and you want to add a new header:: + + def index(): + return render_template('index.html', foo=42) + + You can now do something like this:: + + def index(): + response = make_response(render_template('index.html', foo=42)) + response.headers['X-Parachutes'] = 'parachutes are cool' + return response + + This function accepts the very same arguments you can return from a + view function. 
This for example creates a response with a 404 error + code:: + + response = make_response(render_template('not_found.html'), 404) + + The other use case of this function is to force the return value of a + view function into a response which is helpful with view + decorators:: + + response = make_response(view_function()) + response.headers['X-Parachutes'] = 'parachutes are cool' + + Internally this function does the following things: + + - if no arguments are passed, it creates a new response argument + - if one argument is passed, :meth:`flask.Flask.make_response` + is invoked with it. + - if more than one argument is passed, the arguments are passed + to the :meth:`flask.Flask.make_response` function as tuple. + + .. versionadded:: 0.6 + """ + if not args: + return current_app.response_class() + if len(args) == 1: + args = args[0] + return current_app.make_response(args) + + +def url_for(endpoint: str, **values: t.Any) -> str: + """Generates a URL to the given endpoint with the method provided. + + Variable arguments that are unknown to the target endpoint are appended + to the generated URL as query arguments. If the value of a query argument + is ``None``, the whole pair is skipped. In case blueprints are active + you can shortcut references to the same blueprint by prefixing the + local endpoint with a dot (``.``). + + This will reference the index function local to the current blueprint:: + + url_for('.index') + + See :ref:`url-building`. + + Configuration values ``APPLICATION_ROOT`` and ``SERVER_NAME`` are only used when + generating URLs outside of a request context. + + To integrate applications, :class:`Flask` has a hook to intercept URL build + errors through :attr:`Flask.url_build_error_handlers`. The `url_for` + function results in a :exc:`~werkzeug.routing.BuildError` when the current + app does not have a URL for the given endpoint and values. When it does, the + :data:`~flask.current_app` calls its :attr:`~Flask.url_build_error_handlers` if + it is not ``None``, which can return a string to use as the result of + `url_for` (instead of `url_for`'s default to raise the + :exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception. + An example:: + + def external_url_handler(error, endpoint, values): + "Looks up an external URL when `url_for` cannot build a URL." + # This is an example of hooking the build_error_handler. + # Here, lookup_url is some utility function you've built + # which looks up the endpoint in some external URL registry. + url = lookup_url(endpoint, **values) + if url is None: + # External lookup did not have a URL. + # Re-raise the BuildError, in context of original traceback. + exc_type, exc_value, tb = sys.exc_info() + if exc_value is error: + raise exc_type(exc_value).with_traceback(tb) + else: + raise error + # url_for will use this result, instead of raising BuildError. + return url + + app.url_build_error_handlers.append(external_url_handler) + + Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and + `endpoint` and `values` are the arguments passed into `url_for`. Note + that this is for building URLs outside the current application, and not for + handling 404 NotFound errors. + + .. versionadded:: 0.10 + The `_scheme` parameter was added. + + .. versionadded:: 0.9 + The `_anchor` and `_method` parameters were added. + + .. versionadded:: 0.9 + Calls :meth:`Flask.handle_build_error` on + :exc:`~werkzeug.routing.BuildError`. 
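+
+ A short usage sketch (``"index"`` and ``page`` are hypothetical endpoint and
+ argument names; arguments unknown to the rule end up in the query string)::
+
+     url_for("index")           # e.g. "/" if index() is routed at the root
+     url_for("index", page=2)   # e.g. "/?page=2"
+     url_for(".detail", id=42)  # relative to the current blueprint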
+ + :param endpoint: the endpoint of the URL (name of the function) + :param values: the variable arguments of the URL rule + :param _external: if set to ``True``, an absolute URL is generated. Server + address can be changed via ``SERVER_NAME`` configuration variable which + falls back to the `Host` header, then to the IP and port of the request. + :param _scheme: a string specifying the desired URL scheme. The `_external` + parameter must be set to ``True`` or a :exc:`ValueError` is raised. The default + behavior uses the same scheme as the current request, or + :data:`PREFERRED_URL_SCHEME` if no request context is available. + This also can be set to an empty string to build protocol-relative + URLs. + :param _anchor: if provided this is added as anchor to the URL. + :param _method: if provided this explicitly specifies an HTTP method. + """ + appctx = _app_ctx_stack.top + reqctx = _request_ctx_stack.top + + if appctx is None: + raise RuntimeError( + "Attempted to generate a URL without the application context being" + " pushed. This has to be executed when application context is" + " available." + ) + + # If request specific information is available we have some extra + # features that support "relative" URLs. + if reqctx is not None: + url_adapter = reqctx.url_adapter + blueprint_name = request.blueprint + + if endpoint[:1] == ".": + if blueprint_name is not None: + endpoint = f"{blueprint_name}{endpoint}" + else: + endpoint = endpoint[1:] + + external = values.pop("_external", False) + + # Otherwise go with the url adapter from the appctx and make + # the URLs external by default. + else: + url_adapter = appctx.url_adapter + + if url_adapter is None: + raise RuntimeError( + "Application was not able to create a URL adapter for request" + " independent URL generation. You might be able to fix this by" + " setting the SERVER_NAME config variable." + ) + + external = values.pop("_external", True) + + anchor = values.pop("_anchor", None) + method = values.pop("_method", None) + scheme = values.pop("_scheme", None) + appctx.app.inject_url_defaults(endpoint, values) + + # This is not the best way to deal with this but currently the + # underlying Werkzeug router does not support overriding the scheme on + # a per build call basis. + old_scheme = None + if scheme is not None: + if not external: + raise ValueError("When specifying _scheme, _external must be True") + old_scheme = url_adapter.url_scheme + url_adapter.url_scheme = scheme + + try: + try: + rv = url_adapter.build( + endpoint, values, method=method, force_external=external + ) + finally: + if old_scheme is not None: + url_adapter.url_scheme = old_scheme + except BuildError as error: + # We need to inject the values again so that the app callback can + # deal with that sort of stuff. + values["_external"] = external + values["_anchor"] = anchor + values["_method"] = method + values["_scheme"] = scheme + return appctx.app.handle_url_build_error(error, endpoint, values) + + if anchor is not None: + rv += f"#{url_quote(anchor)}" + return rv + + +def get_template_attribute(template_name: str, attribute: str) -> t.Any: + """Loads a macro (or variable) a template exports. This can be used to + invoke a macro from within Python code. If you for example have a + template named :file:`_cider.html` with the following contents: + + .. 
sourcecode:: html+jinja + + {% macro hello(name) %}Hello {{ name }}!{% endmacro %} + + You can access this from Python code like this:: + + hello = get_template_attribute('_cider.html', 'hello') + return hello('World') + + .. versionadded:: 0.2 + + :param template_name: the name of the template + :param attribute: the name of the variable of macro to access + """ + return getattr(current_app.jinja_env.get_template(template_name).module, attribute) + + +def flash(message: str, category: str = "message") -> None: + """Flashes a message to the next request. In order to remove the + flashed message from the session and to display it to the user, + the template has to call :func:`get_flashed_messages`. + + .. versionchanged:: 0.3 + `category` parameter added. + + :param message: the message to be flashed. + :param category: the category for the message. The following values + are recommended: ``'message'`` for any kind of message, + ``'error'`` for errors, ``'info'`` for information + messages and ``'warning'`` for warnings. However any + kind of string can be used as category. + """ + # Original implementation: + # + # session.setdefault('_flashes', []).append((category, message)) + # + # This assumed that changes made to mutable structures in the session are + # always in sync with the session object, which is not true for session + # implementations that use external storage for keeping their keys/values. + flashes = session.get("_flashes", []) + flashes.append((category, message)) + session["_flashes"] = flashes + message_flashed.send( + current_app._get_current_object(), # type: ignore + message=message, + category=category, + ) + + +def get_flashed_messages( + with_categories: bool = False, category_filter: t.Iterable[str] = () +) -> t.Union[t.List[str], t.List[t.Tuple[str, str]]]: + """Pulls all flashed messages from the session and returns them. + Further calls in the same request to the function will return + the same messages. By default just the messages are returned, + but when `with_categories` is set to ``True``, the return value will + be a list of tuples in the form ``(category, message)`` instead. + + Filter the flashed messages to one or more categories by providing those + categories in `category_filter`. This allows rendering categories in + separate html blocks. The `with_categories` and `category_filter` + arguments are distinct: + + * `with_categories` controls whether categories are returned with message + text (``True`` gives a tuple, where ``False`` gives just the message text). + * `category_filter` filters the messages down to only those matching the + provided categories. + + See :doc:`/patterns/flashing` for examples. + + .. versionchanged:: 0.3 + `with_categories` parameter added. + + .. versionchanged:: 0.9 + `category_filter` parameter added. + + :param with_categories: set to ``True`` to also receive categories. + :param category_filter: filter of categories to limit return values. Only + categories in the list will be returned. 
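+
+ A minimal sketch of reading messages back in view code (the category names
+ here are arbitrary examples)::
+
+     from flask import get_flashed_messages
+
+     for category, message in get_flashed_messages(with_categories=True):
+         print(f"[{category}] {message}")
+
+     errors = get_flashed_messages(category_filter=["error"])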
+ """ + flashes = _request_ctx_stack.top.flashes + if flashes is None: + _request_ctx_stack.top.flashes = flashes = ( + session.pop("_flashes") if "_flashes" in session else [] + ) + if category_filter: + flashes = list(filter(lambda f: f[0] in category_filter, flashes)) + if not with_categories: + return [x[1] for x in flashes] + return flashes + + +def _prepare_send_file_kwargs( + download_name: t.Optional[str] = None, + attachment_filename: t.Optional[str] = None, + etag: t.Optional[t.Union[bool, str]] = None, + add_etags: t.Optional[t.Union[bool]] = None, + max_age: t.Optional[ + t.Union[int, t.Callable[[t.Optional[str]], t.Optional[int]]] + ] = None, + cache_timeout: t.Optional[int] = None, + **kwargs: t.Any, +) -> t.Dict[str, t.Any]: + if attachment_filename is not None: + warnings.warn( + "The 'attachment_filename' parameter has been renamed to" + " 'download_name'. The old name will be removed in Flask" + " 2.1.", + DeprecationWarning, + stacklevel=3, + ) + download_name = attachment_filename + + if cache_timeout is not None: + warnings.warn( + "The 'cache_timeout' parameter has been renamed to" + " 'max_age'. The old name will be removed in Flask 2.1.", + DeprecationWarning, + stacklevel=3, + ) + max_age = cache_timeout + + if add_etags is not None: + warnings.warn( + "The 'add_etags' parameter has been renamed to 'etag'. The" + " old name will be removed in Flask 2.1.", + DeprecationWarning, + stacklevel=3, + ) + etag = add_etags + + if max_age is None: + max_age = current_app.get_send_file_max_age + + kwargs.update( + environ=request.environ, + download_name=download_name, + etag=etag, + max_age=max_age, + use_x_sendfile=current_app.use_x_sendfile, + response_class=current_app.response_class, + _root_path=current_app.root_path, # type: ignore + ) + return kwargs + + +def send_file( + path_or_file: t.Union[os.PathLike, str, t.BinaryIO], + mimetype: t.Optional[str] = None, + as_attachment: bool = False, + download_name: t.Optional[str] = None, + attachment_filename: t.Optional[str] = None, + conditional: bool = True, + etag: t.Union[bool, str] = True, + add_etags: t.Optional[bool] = None, + last_modified: t.Optional[t.Union[datetime, int, float]] = None, + max_age: t.Optional[ + t.Union[int, t.Callable[[t.Optional[str]], t.Optional[int]]] + ] = None, + cache_timeout: t.Optional[int] = None, +): + """Send the contents of a file to the client. + + The first argument can be a file path or a file-like object. Paths + are preferred in most cases because Werkzeug can manage the file and + get extra information from the path. Passing a file-like object + requires that the file is opened in binary mode, and is mostly + useful when building a file in memory with :class:`io.BytesIO`. + + Never pass file paths provided by a user. The path is assumed to be + trusted, so a user could craft a path to access a file you didn't + intend. Use :func:`send_from_directory` to safely serve + user-requested paths from within a directory. + + If the WSGI server sets a ``file_wrapper`` in ``environ``, it is + used, otherwise Werkzeug's built-in wrapper is used. Alternatively, + if the HTTP server supports ``X-Sendfile``, configuring Flask with + ``USE_X_SENDFILE = True`` will tell the server to send the given + path, which is much more efficient than reading it in Python. + + :param path_or_file: The path to the file to send, relative to the + current working directory if a relative path is given. + Alternatively, a file-like object opened in binary mode. 
Make + sure the file pointer is seeked to the start of the data. + :param mimetype: The MIME type to send for the file. If not + provided, it will try to detect it from the file name. + :param as_attachment: Indicate to a browser that it should offer to + save the file instead of displaying it. + :param download_name: The default name browsers will use when saving + the file. Defaults to the passed file name. + :param conditional: Enable conditional and range responses based on + request headers. Requires passing a file path and ``environ``. + :param etag: Calculate an ETag for the file, which requires passing + a file path. Can also be a string to use instead. + :param last_modified: The last modified time to send for the file, + in seconds. If not provided, it will try to detect it from the + file path. + :param max_age: How long the client should cache the file, in + seconds. If set, ``Cache-Control`` will be ``public``, otherwise + it will be ``no-cache`` to prefer conditional caching. + + .. versionchanged:: 2.0 + ``download_name`` replaces the ``attachment_filename`` + parameter. If ``as_attachment=False``, it is passed with + ``Content-Disposition: inline`` instead. + + .. versionchanged:: 2.0 + ``max_age`` replaces the ``cache_timeout`` parameter. + ``conditional`` is enabled and ``max_age`` is not set by + default. + + .. versionchanged:: 2.0 + ``etag`` replaces the ``add_etags`` parameter. It can be a + string to use instead of generating one. + + .. versionchanged:: 2.0 + Passing a file-like object that inherits from + :class:`~io.TextIOBase` will raise a :exc:`ValueError` rather + than sending an empty file. + + .. versionadded:: 2.0 + Moved the implementation to Werkzeug. This is now a wrapper to + pass some Flask-specific arguments. + + .. versionchanged:: 1.1 + ``filename`` may be a :class:`~os.PathLike` object. + + .. versionchanged:: 1.1 + Passing a :class:`~io.BytesIO` object supports range requests. + + .. versionchanged:: 1.0.3 + Filenames are encoded with ASCII instead of Latin-1 for broader + compatibility with WSGI servers. + + .. versionchanged:: 1.0 + UTF-8 filenames as specified in :rfc:`2231` are supported. + + .. versionchanged:: 0.12 + The filename is no longer automatically inferred from file + objects. If you want to use automatic MIME and etag support, + pass a filename via ``filename_or_fp`` or + ``attachment_filename``. + + .. versionchanged:: 0.12 + ``attachment_filename`` is preferred over ``filename`` for MIME + detection. + + .. versionchanged:: 0.9 + ``cache_timeout`` defaults to + :meth:`Flask.get_send_file_max_age`. + + .. versionchanged:: 0.7 + MIME guessing and etag support for file-like objects was + deprecated because it was unreliable. Pass a filename if you are + able to, otherwise attach an etag yourself. + + .. versionchanged:: 0.5 + The ``add_etags``, ``cache_timeout`` and ``conditional`` + parameters were added. The default behavior is to add etags. + + .. 
versionadded:: 0.2 + """ + return werkzeug.utils.send_file( + **_prepare_send_file_kwargs( + path_or_file=path_or_file, + environ=request.environ, + mimetype=mimetype, + as_attachment=as_attachment, + download_name=download_name, + attachment_filename=attachment_filename, + conditional=conditional, + etag=etag, + add_etags=add_etags, + last_modified=last_modified, + max_age=max_age, + cache_timeout=cache_timeout, + ) + ) + + +def safe_join(directory: str, *pathnames: str) -> str: + """Safely join zero or more untrusted path components to a base + directory to avoid escaping the base directory. + + :param directory: The trusted base directory. + :param pathnames: The untrusted path components relative to the + base directory. + :return: A safe path, otherwise ``None``. + """ + warnings.warn( + "'flask.helpers.safe_join' is deprecated and will be removed in" + " Flask 2.1. Use 'werkzeug.utils.safe_join' instead.", + DeprecationWarning, + stacklevel=2, + ) + path = werkzeug.utils.safe_join(directory, *pathnames) + + if path is None: + raise NotFound() + + return path + + +def send_from_directory( + directory: t.Union[os.PathLike, str], + path: t.Union[os.PathLike, str], + filename: t.Optional[str] = None, + **kwargs: t.Any, +) -> "Response": + """Send a file from within a directory using :func:`send_file`. + + .. code-block:: python + + @app.route("/uploads/") + def download_file(name): + return send_from_directory( + app.config['UPLOAD_FOLDER'], name, as_attachment=True + ) + + This is a secure way to serve files from a folder, such as static + files or uploads. Uses :func:`~werkzeug.security.safe_join` to + ensure the path coming from the client is not maliciously crafted to + point outside the specified directory. + + If the final path does not point to an existing regular file, + raises a 404 :exc:`~werkzeug.exceptions.NotFound` error. + + :param directory: The directory that ``path`` must be located under. + :param path: The path to the file to send, relative to + ``directory``. + :param kwargs: Arguments to pass to :func:`send_file`. + + .. versionchanged:: 2.0 + ``path`` replaces the ``filename`` parameter. + + .. versionadded:: 2.0 + Moved the implementation to Werkzeug. This is now a wrapper to + pass some Flask-specific arguments. + + .. versionadded:: 0.5 + """ + if filename is not None: + warnings.warn( + "The 'filename' parameter has been renamed to 'path'. The" + " old name will be removed in Flask 2.1.", + DeprecationWarning, + stacklevel=2, + ) + path = filename + + return werkzeug.utils.send_from_directory( # type: ignore + directory, path, **_prepare_send_file_kwargs(**kwargs) + ) + + +def get_root_path(import_name: str) -> str: + """Find the root path of a package, or the path that contains a + module. If it cannot be found, returns the current working + directory. + + Not to be confused with the value returned by :func:`find_package`. + + :meta private: + """ + # Module already imported and has a file attribute. Use that first. + mod = sys.modules.get(import_name) + + if mod is not None and hasattr(mod, "__file__"): + return os.path.dirname(os.path.abspath(mod.__file__)) + + # Next attempt: check the loader. + loader = pkgutil.get_loader(import_name) + + # Loader does not exist or we're referring to an unloaded main + # module or a main module without path (interactive sessions), go + # with the current working directory. 
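+ # Illustrative example (not in the upstream file): in an interactive
+ # session "__main__" has no __file__, so
+ #     get_root_path("__main__")
+ # takes the branch below and returns os.getcwd().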
+ if loader is None or import_name == "__main__": + return os.getcwd() + + if hasattr(loader, "get_filename"): + filepath = loader.get_filename(import_name) # type: ignore + else: + # Fall back to imports. + __import__(import_name) + mod = sys.modules[import_name] + filepath = getattr(mod, "__file__", None) + + # If we don't have a file path it might be because it is a + # namespace package. In this case pick the root path from the + # first module that is contained in the package. + if filepath is None: + raise RuntimeError( + "No root path can be found for the provided module" + f" {import_name!r}. This can happen because the module" + " came from an import hook that does not provide file" + " name information or because it's a namespace package." + " In this case the root path needs to be explicitly" + " provided." + ) + + # filepath is import_name.py for a module, or __init__.py for a package. + return os.path.dirname(os.path.abspath(filepath)) + + +class locked_cached_property(werkzeug.utils.cached_property): + """A :func:`property` that is only evaluated once. Like + :class:`werkzeug.utils.cached_property` except access uses a lock + for thread safety. + + .. versionchanged:: 2.0 + Inherits from Werkzeug's ``cached_property`` (and ``property``). + """ + + def __init__( + self, + fget: t.Callable[[t.Any], t.Any], + name: t.Optional[str] = None, + doc: t.Optional[str] = None, + ) -> None: + super().__init__(fget, name=name, doc=doc) + self.lock = RLock() + + def __get__(self, obj: object, type: type = None) -> t.Any: # type: ignore + if obj is None: + return self + + with self.lock: + return super().__get__(obj, type=type) + + def __set__(self, obj: object, value: t.Any) -> None: + with self.lock: + super().__set__(obj, value) + + def __delete__(self, obj: object) -> None: + with self.lock: + super().__delete__(obj) + + +def total_seconds(td: timedelta) -> int: + """Returns the total seconds from a timedelta object. + + :param timedelta td: the timedelta to be converted in seconds + + :returns: number of seconds + :rtype: int + + .. deprecated:: 2.0 + Will be removed in Flask 2.1. Use + :meth:`timedelta.total_seconds` instead. + """ + warnings.warn( + "'total_seconds' is deprecated and will be removed in Flask" + " 2.1. Use 'timedelta.total_seconds' instead.", + DeprecationWarning, + stacklevel=2, + ) + return td.days * 60 * 60 * 24 + td.seconds + + +def is_ip(value: str) -> bool: + """Determine if the given string is an IP address. + + :param value: value to check + :type value: str + + :return: True if string is an IP address + :rtype: bool + """ + for family in (socket.AF_INET, socket.AF_INET6): + try: + socket.inet_pton(family, value) + except OSError: + pass + else: + return True + + return False + + +@lru_cache(maxsize=None) +def _split_blueprint_path(name: str) -> t.List[str]: + out: t.List[str] = [name] + + if "." 
in name: + out.extend(_split_blueprint_path(name.rpartition(".")[0])) + + return out diff --git a/venv/lib/python3.8/site-packages/flask/json/__init__.py b/venv/lib/python3.8/site-packages/flask/json/__init__.py new file mode 100644 index 0000000..10d5123 --- /dev/null +++ b/venv/lib/python3.8/site-packages/flask/json/__init__.py @@ -0,0 +1,357 @@ +import decimal +import io +import json as _json +import typing as t +import uuid +import warnings +from datetime import date + +from jinja2.utils import htmlsafe_json_dumps as _jinja_htmlsafe_dumps +from werkzeug.http import http_date + +from ..globals import current_app +from ..globals import request + +if t.TYPE_CHECKING: + from ..app import Flask + from ..wrappers import Response + +try: + import dataclasses +except ImportError: + # Python < 3.7 + dataclasses = None # type: ignore + + +class JSONEncoder(_json.JSONEncoder): + """The default JSON encoder. Handles extra types compared to the + built-in :class:`json.JSONEncoder`. + + - :class:`datetime.datetime` and :class:`datetime.date` are + serialized to :rfc:`822` strings. This is the same as the HTTP + date format. + - :class:`uuid.UUID` is serialized to a string. + - :class:`dataclasses.dataclass` is passed to + :func:`dataclasses.asdict`. + - :class:`~markupsafe.Markup` (or any object with a ``__html__`` + method) will call the ``__html__`` method to get a string. + + Assign a subclass of this to :attr:`flask.Flask.json_encoder` or + :attr:`flask.Blueprint.json_encoder` to override the default. + """ + + def default(self, o: t.Any) -> t.Any: + """Convert ``o`` to a JSON serializable type. See + :meth:`json.JSONEncoder.default`. Python does not support + overriding how basic types like ``str`` or ``list`` are + serialized, they are handled before this method. + """ + if isinstance(o, date): + return http_date(o) + if isinstance(o, (decimal.Decimal, uuid.UUID)): + return str(o) + if dataclasses and dataclasses.is_dataclass(o): + return dataclasses.asdict(o) + if hasattr(o, "__html__"): + return str(o.__html__()) + return super().default(o) + + +class JSONDecoder(_json.JSONDecoder): + """The default JSON decoder. + + This does not change any behavior from the built-in + :class:`json.JSONDecoder`. + + Assign a subclass of this to :attr:`flask.Flask.json_decoder` or + :attr:`flask.Blueprint.json_decoder` to override the default. 
+ """ + + +def _dump_arg_defaults( + kwargs: t.Dict[str, t.Any], app: t.Optional["Flask"] = None +) -> None: + """Inject default arguments for dump functions.""" + if app is None: + app = current_app + + if app: + cls = app.json_encoder + bp = app.blueprints.get(request.blueprint) if request else None # type: ignore + if bp is not None and bp.json_encoder is not None: + cls = bp.json_encoder + + kwargs.setdefault("cls", cls) + kwargs.setdefault("ensure_ascii", app.config["JSON_AS_ASCII"]) + kwargs.setdefault("sort_keys", app.config["JSON_SORT_KEYS"]) + else: + kwargs.setdefault("sort_keys", True) + kwargs.setdefault("cls", JSONEncoder) + + +def _load_arg_defaults( + kwargs: t.Dict[str, t.Any], app: t.Optional["Flask"] = None +) -> None: + """Inject default arguments for load functions.""" + if app is None: + app = current_app + + if app: + cls = app.json_decoder + bp = app.blueprints.get(request.blueprint) if request else None # type: ignore + if bp is not None and bp.json_decoder is not None: + cls = bp.json_decoder + + kwargs.setdefault("cls", cls) + else: + kwargs.setdefault("cls", JSONDecoder) + + +def dumps(obj: t.Any, app: t.Optional["Flask"] = None, **kwargs: t.Any) -> str: + """Serialize an object to a string of JSON. + + Takes the same arguments as the built-in :func:`json.dumps`, with + some defaults from application configuration. + + :param obj: Object to serialize to JSON. + :param app: Use this app's config instead of the active app context + or defaults. + :param kwargs: Extra arguments passed to :func:`json.dumps`. + + .. versionchanged:: 2.0.2 + :class:`decimal.Decimal` is supported by converting to a string. + + .. versionchanged:: 2.0 + ``encoding`` is deprecated and will be removed in Flask 2.1. + + .. versionchanged:: 1.0.3 + ``app`` can be passed directly, rather than requiring an app + context for configuration. + """ + _dump_arg_defaults(kwargs, app=app) + encoding = kwargs.pop("encoding", None) + rv = _json.dumps(obj, **kwargs) + + if encoding is not None: + warnings.warn( + "'encoding' is deprecated and will be removed in Flask 2.1.", + DeprecationWarning, + stacklevel=2, + ) + + if isinstance(rv, str): + return rv.encode(encoding) # type: ignore + + return rv + + +def dump( + obj: t.Any, fp: t.IO[str], app: t.Optional["Flask"] = None, **kwargs: t.Any +) -> None: + """Serialize an object to JSON written to a file object. + + Takes the same arguments as the built-in :func:`json.dump`, with + some defaults from application configuration. + + :param obj: Object to serialize to JSON. + :param fp: File object to write JSON to. + :param app: Use this app's config instead of the active app context + or defaults. + :param kwargs: Extra arguments passed to :func:`json.dump`. + + .. versionchanged:: 2.0 + Writing to a binary file, and the ``encoding`` argument, is + deprecated and will be removed in Flask 2.1. + """ + _dump_arg_defaults(kwargs, app=app) + encoding = kwargs.pop("encoding", None) + show_warning = encoding is not None + + try: + fp.write("") + except TypeError: + show_warning = True + fp = io.TextIOWrapper(fp, encoding or "utf-8") # type: ignore + + if show_warning: + warnings.warn( + "Writing to a binary file, and the 'encoding' argument, is" + " deprecated and will be removed in Flask 2.1.", + DeprecationWarning, + stacklevel=2, + ) + + _json.dump(obj, fp, **kwargs) + + +def loads(s: str, app: t.Optional["Flask"] = None, **kwargs: t.Any) -> t.Any: + """Deserialize an object from a string of JSON. 
+ + Takes the same arguments as the built-in :func:`json.loads`, with + some defaults from application configuration. + + :param s: JSON string to deserialize. + :param app: Use this app's config instead of the active app context + or defaults. + :param kwargs: Extra arguments passed to :func:`json.loads`. + + .. versionchanged:: 2.0 + ``encoding`` is deprecated and will be removed in Flask 2.1. The + data must be a string or UTF-8 bytes. + + .. versionchanged:: 1.0.3 + ``app`` can be passed directly, rather than requiring an app + context for configuration. + """ + _load_arg_defaults(kwargs, app=app) + encoding = kwargs.pop("encoding", None) + + if encoding is not None: + warnings.warn( + "'encoding' is deprecated and will be removed in Flask 2.1." + " The data must be a string or UTF-8 bytes.", + DeprecationWarning, + stacklevel=2, + ) + + if isinstance(s, bytes): + s = s.decode(encoding) + + return _json.loads(s, **kwargs) + + +def load(fp: t.IO[str], app: t.Optional["Flask"] = None, **kwargs: t.Any) -> t.Any: + """Deserialize an object from JSON read from a file object. + + Takes the same arguments as the built-in :func:`json.load`, with + some defaults from application configuration. + + :param fp: File object to read JSON from. + :param app: Use this app's config instead of the active app context + or defaults. + :param kwargs: Extra arguments passed to :func:`json.load`. + + .. versionchanged:: 2.0 + ``encoding`` is deprecated and will be removed in Flask 2.1. The + file must be text mode, or binary mode with UTF-8 bytes. + """ + _load_arg_defaults(kwargs, app=app) + encoding = kwargs.pop("encoding", None) + + if encoding is not None: + warnings.warn( + "'encoding' is deprecated and will be removed in Flask 2.1." + " The file must be text mode, or binary mode with UTF-8" + " bytes.", + DeprecationWarning, + stacklevel=2, + ) + + if isinstance(fp.read(0), bytes): + fp = io.TextIOWrapper(fp, encoding) # type: ignore + + return _json.load(fp, **kwargs) + + +def htmlsafe_dumps(obj: t.Any, **kwargs: t.Any) -> str: + """Serialize an object to a string of JSON with :func:`dumps`, then + replace HTML-unsafe characters with Unicode escapes and mark the + result safe with :class:`~markupsafe.Markup`. + + This is available in templates as the ``|tojson`` filter. + + The returned string is safe to render in HTML documents and + ``') + # => <script> do_nasty_stuff() </script> + # sanitize_html('Click here for $100') + # => Click here for $100 + def sanitize_token(self, token): + + # accommodate filters which use token_type differently + token_type = token["type"] + if token_type in ("StartTag", "EndTag", "EmptyTag"): + name = token["name"] + namespace = token["namespace"] + if ((namespace, name) in self.allowed_elements or + (namespace is None and + (namespaces["html"], name) in self.allowed_elements)): + return self.allowed_token(token) + else: + return self.disallowed_token(token) + elif token_type == "Comment": + pass + else: + return token + + def allowed_token(self, token): + if "data" in token: + attrs = token["data"] + attr_names = set(attrs.keys()) + + # Remove forbidden attributes + for to_remove in (attr_names - self.allowed_attributes): + del token["data"][to_remove] + attr_names.remove(to_remove) + + # Remove attributes with disallowed URL values + for attr in (attr_names & self.attr_val_is_uri): + assert attr in attrs + # I don't have a clue where this regexp comes from or why it matches those + # characters, nor why we call unescape. I just know it's always been here. 
+ # Should you be worried by this comment in a sanitizer? Yes. On the other hand, all + # this will do is remove *more* than it otherwise would. + val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\\s]+", '', + unescape(attrs[attr])).lower() + # remove replacement characters from unescaped characters + val_unescaped = val_unescaped.replace("\ufffd", "") + try: + uri = urlparse.urlparse(val_unescaped) + except ValueError: + uri = None + del attrs[attr] + if uri and uri.scheme: + if uri.scheme not in self.allowed_protocols: + del attrs[attr] + if uri.scheme == 'data': + m = data_content_type.match(uri.path) + if not m: + del attrs[attr] + elif m.group('content_type') not in self.allowed_content_types: + del attrs[attr] + + for attr in self.svg_attr_val_allows_ref: + if attr in attrs: + attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)', + ' ', + unescape(attrs[attr])) + if (token["name"] in self.svg_allow_local_href and + (namespaces['xlink'], 'href') in attrs and re.search(r'^\s*[^#\s].*', + attrs[(namespaces['xlink'], 'href')])): + del attrs[(namespaces['xlink'], 'href')] + if (None, 'style') in attrs: + attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')]) + token["data"] = attrs + return token + + def disallowed_token(self, token): + token_type = token["type"] + if token_type == "EndTag": + token["data"] = "" % token["name"] + elif token["data"]: + assert token_type in ("StartTag", "EmptyTag") + attrs = [] + for (ns, name), v in token["data"].items(): + attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v))) + token["data"] = "<%s%s>" % (token["name"], ''.join(attrs)) + else: + token["data"] = "<%s>" % token["name"] + if token.get("selfClosing"): + token["data"] = token["data"][:-1] + "/>" + + token["type"] = "Characters" + + del token["name"] + return token + + def sanitize_css(self, style): + # disallow urls + style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style) + + # gauntlet + if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style): + return '' + if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style): + return '' + + clean = [] + for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style): + if not value: + continue + if prop.lower() in self.allowed_css_properties: + clean.append(prop + ': ' + value + ';') + elif prop.split('-')[0].lower() in ['background', 'border', 'margin', + 'padding']: + for keyword in value.split(): + if keyword not in self.allowed_css_keywords and \ + not re.match(r"^(#[0-9a-fA-F]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqa + break + else: + clean.append(prop + ': ' + value + ';') + elif prop.lower() in self.allowed_svg_properties: + clean.append(prop + ': ' + value + ';') + + return ' '.join(clean) diff --git a/venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/whitespace.py b/venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/whitespace.py new file mode 100644 index 0000000..0d12584 --- /dev/null +++ b/venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/whitespace.py @@ -0,0 +1,38 @@ +from __future__ import absolute_import, division, unicode_literals + +import re + +from . 
import base +from ..constants import rcdataElements, spaceCharacters +spaceCharacters = "".join(spaceCharacters) + +SPACES_REGEX = re.compile("[%s]+" % spaceCharacters) + + +class Filter(base.Filter): + """Collapses whitespace except in pre, textarea, and script elements""" + spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements)) + + def __iter__(self): + preserve = 0 + for token in base.Filter.__iter__(self): + type = token["type"] + if type == "StartTag" \ + and (preserve or token["name"] in self.spacePreserveElements): + preserve += 1 + + elif type == "EndTag" and preserve: + preserve -= 1 + + elif not preserve and type == "SpaceCharacters" and token["data"]: + # Test on token["data"] above to not introduce spaces where there were not + token["data"] = " " + + elif not preserve and type == "Characters": + token["data"] = collapse_spaces(token["data"]) + + yield token + + +def collapse_spaces(text): + return SPACES_REGEX.sub(' ', text) diff --git a/venv/lib/python3.8/site-packages/pip/_vendor/html5lib/html5parser.py b/venv/lib/python3.8/site-packages/pip/_vendor/html5lib/html5parser.py new file mode 100644 index 0000000..ae41a13 --- /dev/null +++ b/venv/lib/python3.8/site-packages/pip/_vendor/html5lib/html5parser.py @@ -0,0 +1,2791 @@ +from __future__ import absolute_import, division, unicode_literals +from pip._vendor.six import with_metaclass, viewkeys + +import types +from collections import OrderedDict + +from . import _inputstream +from . import _tokenizer + +from . import treebuilders +from .treebuilders.base import Marker + +from . import _utils +from .constants import ( + spaceCharacters, asciiUpper2Lower, + specialElements, headingElements, cdataElements, rcdataElements, + tokenTypes, tagTokenTypes, + namespaces, + htmlIntegrationPointElements, mathmlTextIntegrationPointElements, + adjustForeignAttributes as adjustForeignAttributesMap, + adjustMathMLAttributes, adjustSVGAttributes, + E, + _ReparseException +) + + +def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs): + """Parse an HTML document as a string or file-like object into a tree + + :arg doc: the document to parse as a string or file-like object + + :arg treebuilder: the treebuilder to use when parsing + + :arg namespaceHTMLElements: whether or not to namespace HTML elements + + :returns: parsed tree + + Example: + + >>> from html5lib.html5parser import parse + >>> parse('

<html><body><p>This is a doc</p></body></html>
') + + + """ + tb = treebuilders.getTreeBuilder(treebuilder) + p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) + return p.parse(doc, **kwargs) + + +def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs): + """Parse an HTML fragment as a string or file-like object into a tree + + :arg doc: the fragment to parse as a string or file-like object + + :arg container: the container context to parse the fragment in + + :arg treebuilder: the treebuilder to use when parsing + + :arg namespaceHTMLElements: whether or not to namespace HTML elements + + :returns: parsed tree + + Example: + + >>> from html5lib.html5libparser import parseFragment + >>> parseFragment('this is a fragment') + + + """ + tb = treebuilders.getTreeBuilder(treebuilder) + p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) + return p.parseFragment(doc, container=container, **kwargs) + + +def method_decorator_metaclass(function): + class Decorated(type): + def __new__(meta, classname, bases, classDict): + for attributeName, attribute in classDict.items(): + if isinstance(attribute, types.FunctionType): + attribute = function(attribute) + + classDict[attributeName] = attribute + return type.__new__(meta, classname, bases, classDict) + return Decorated + + +class HTMLParser(object): + """HTML parser + + Generates a tree structure from a stream of (possibly malformed) HTML. + + """ + + def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False): + """ + :arg tree: a treebuilder class controlling the type of tree that will be + returned. Built in treebuilders can be accessed through + html5lib.treebuilders.getTreeBuilder(treeType) + + :arg strict: raise an exception when a parse error is encountered + + :arg namespaceHTMLElements: whether or not to namespace HTML elements + + :arg debug: whether or not to enable debug mode which logs things + + Example: + + >>> from html5lib.html5parser import HTMLParser + >>> parser = HTMLParser() # generates parser with etree builder + >>> parser = HTMLParser('lxml', strict=True) # generates parser with lxml builder which is strict + + """ + + # Raise an exception on the first error encountered + self.strict = strict + + if tree is None: + tree = treebuilders.getTreeBuilder("etree") + self.tree = tree(namespaceHTMLElements) + self.errors = [] + + self.phases = dict([(name, cls(self, self.tree)) for name, cls in + getPhases(debug).items()]) + + def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs): + + self.innerHTMLMode = innerHTML + self.container = container + self.scripting = scripting + self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs) + self.reset() + + try: + self.mainLoop() + except _ReparseException: + self.reset() + self.mainLoop() + + def reset(self): + self.tree.reset() + self.firstStartTag = False + self.errors = [] + self.log = [] # only used with debug mode + # "quirks" / "limited quirks" / "no quirks" + self.compatMode = "no quirks" + + if self.innerHTMLMode: + self.innerHTML = self.container.lower() + + if self.innerHTML in cdataElements: + self.tokenizer.state = self.tokenizer.rcdataState + elif self.innerHTML in rcdataElements: + self.tokenizer.state = self.tokenizer.rawtextState + elif self.innerHTML == 'plaintext': + self.tokenizer.state = self.tokenizer.plaintextState + else: + # state already is data state + # self.tokenizer.state = self.tokenizer.dataState + pass + self.phase = self.phases["beforeHtml"] + 
self.phase.insertHtmlElement() + self.resetInsertionMode() + else: + self.innerHTML = False # pylint:disable=redefined-variable-type + self.phase = self.phases["initial"] + + self.lastPhase = None + + self.beforeRCDataPhase = None + + self.framesetOK = True + + @property + def documentEncoding(self): + """Name of the character encoding that was used to decode the input stream, or + :obj:`None` if that is not determined yet + + """ + if not hasattr(self, 'tokenizer'): + return None + return self.tokenizer.stream.charEncoding[0].name + + def isHTMLIntegrationPoint(self, element): + if (element.name == "annotation-xml" and + element.namespace == namespaces["mathml"]): + return ("encoding" in element.attributes and + element.attributes["encoding"].translate( + asciiUpper2Lower) in + ("text/html", "application/xhtml+xml")) + else: + return (element.namespace, element.name) in htmlIntegrationPointElements + + def isMathMLTextIntegrationPoint(self, element): + return (element.namespace, element.name) in mathmlTextIntegrationPointElements + + def mainLoop(self): + CharactersToken = tokenTypes["Characters"] + SpaceCharactersToken = tokenTypes["SpaceCharacters"] + StartTagToken = tokenTypes["StartTag"] + EndTagToken = tokenTypes["EndTag"] + CommentToken = tokenTypes["Comment"] + DoctypeToken = tokenTypes["Doctype"] + ParseErrorToken = tokenTypes["ParseError"] + + for token in self.normalizedTokens(): + prev_token = None + new_token = token + while new_token is not None: + prev_token = new_token + currentNode = self.tree.openElements[-1] if self.tree.openElements else None + currentNodeNamespace = currentNode.namespace if currentNode else None + currentNodeName = currentNode.name if currentNode else None + + type = new_token["type"] + + if type == ParseErrorToken: + self.parseError(new_token["data"], new_token.get("datavars", {})) + new_token = None + else: + if (len(self.tree.openElements) == 0 or + currentNodeNamespace == self.tree.defaultNamespace or + (self.isMathMLTextIntegrationPoint(currentNode) and + ((type == StartTagToken and + token["name"] not in frozenset(["mglyph", "malignmark"])) or + type in (CharactersToken, SpaceCharactersToken))) or + (currentNodeNamespace == namespaces["mathml"] and + currentNodeName == "annotation-xml" and + type == StartTagToken and + token["name"] == "svg") or + (self.isHTMLIntegrationPoint(currentNode) and + type in (StartTagToken, CharactersToken, SpaceCharactersToken))): + phase = self.phase + else: + phase = self.phases["inForeignContent"] + + if type == CharactersToken: + new_token = phase.processCharacters(new_token) + elif type == SpaceCharactersToken: + new_token = phase.processSpaceCharacters(new_token) + elif type == StartTagToken: + new_token = phase.processStartTag(new_token) + elif type == EndTagToken: + new_token = phase.processEndTag(new_token) + elif type == CommentToken: + new_token = phase.processComment(new_token) + elif type == DoctypeToken: + new_token = phase.processDoctype(new_token) + + if (type == StartTagToken and prev_token["selfClosing"] and + not prev_token["selfClosingAcknowledged"]): + self.parseError("non-void-element-with-trailing-solidus", + {"name": prev_token["name"]}) + + # When the loop finishes it's EOF + reprocess = True + phases = [] + while reprocess: + phases.append(self.phase) + reprocess = self.phase.processEOF() + if reprocess: + assert self.phase not in phases + + def normalizedTokens(self): + for token in self.tokenizer: + yield self.normalizeToken(token) + + def parse(self, stream, *args, **kwargs): + """Parse a 
HTML document into a well-formed tree + + :arg stream: a file-like object or string containing the HTML to be parsed + + The optional encoding parameter must be a string that indicates + the encoding. If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element). + + :arg scripting: treat noscript elements as if JavaScript was turned on + + :returns: parsed tree + + Example: + + >>> from html5lib.html5parser import HTMLParser + >>> parser = HTMLParser() + >>> parser.parse('

<html><body><p>This is a doc</p></body></html>
') + + + """ + self._parse(stream, False, None, *args, **kwargs) + return self.tree.getDocument() + + def parseFragment(self, stream, *args, **kwargs): + """Parse a HTML fragment into a well-formed tree fragment + + :arg container: name of the element we're setting the innerHTML + property if set to None, default to 'div' + + :arg stream: a file-like object or string containing the HTML to be parsed + + The optional encoding parameter must be a string that indicates + the encoding. If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + + :arg scripting: treat noscript elements as if JavaScript was turned on + + :returns: parsed tree + + Example: + + >>> from html5lib.html5libparser import HTMLParser + >>> parser = HTMLParser() + >>> parser.parseFragment('this is a fragment') + + + """ + self._parse(stream, True, *args, **kwargs) + return self.tree.getFragment() + + def parseError(self, errorcode="XXX-undefined-error", datavars=None): + # XXX The idea is to make errorcode mandatory. + if datavars is None: + datavars = {} + self.errors.append((self.tokenizer.stream.position(), errorcode, datavars)) + if self.strict: + raise ParseError(E[errorcode] % datavars) + + def normalizeToken(self, token): + # HTML5 specific normalizations to the token stream + if token["type"] == tokenTypes["StartTag"]: + raw = token["data"] + token["data"] = OrderedDict(raw) + if len(raw) > len(token["data"]): + # we had some duplicated attribute, fix so first wins + token["data"].update(raw[::-1]) + + return token + + def adjustMathMLAttributes(self, token): + adjust_attributes(token, adjustMathMLAttributes) + + def adjustSVGAttributes(self, token): + adjust_attributes(token, adjustSVGAttributes) + + def adjustForeignAttributes(self, token): + adjust_attributes(token, adjustForeignAttributesMap) + + def reparseTokenNormal(self, token): + # pylint:disable=unused-argument + self.parser.phase() + + def resetInsertionMode(self): + # The name of this method is mostly historical. (It's also used in the + # specification.) 
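+ # Illustrative example (not in the upstream file): with the open elements
+ # ending in html > body > table > tbody > tr > td, the reverse walk below
+ # stops at "td" and switches the parser to the "inCell" phase via the
+ # newModes mapping.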
+ last = False + newModes = { + "select": "inSelect", + "td": "inCell", + "th": "inCell", + "tr": "inRow", + "tbody": "inTableBody", + "thead": "inTableBody", + "tfoot": "inTableBody", + "caption": "inCaption", + "colgroup": "inColumnGroup", + "table": "inTable", + "head": "inBody", + "body": "inBody", + "frameset": "inFrameset", + "html": "beforeHead" + } + for node in self.tree.openElements[::-1]: + nodeName = node.name + new_phase = None + if node == self.tree.openElements[0]: + assert self.innerHTML + last = True + nodeName = self.innerHTML + # Check for conditions that should only happen in the innerHTML + # case + if nodeName in ("select", "colgroup", "head", "html"): + assert self.innerHTML + + if not last and node.namespace != self.tree.defaultNamespace: + continue + + if nodeName in newModes: + new_phase = self.phases[newModes[nodeName]] + break + elif last: + new_phase = self.phases["inBody"] + break + + self.phase = new_phase + + def parseRCDataRawtext(self, token, contentType): + # Generic RCDATA/RAWTEXT Parsing algorithm + assert contentType in ("RAWTEXT", "RCDATA") + + self.tree.insertElement(token) + + if contentType == "RAWTEXT": + self.tokenizer.state = self.tokenizer.rawtextState + else: + self.tokenizer.state = self.tokenizer.rcdataState + + self.originalPhase = self.phase + + self.phase = self.phases["text"] + + +@_utils.memoize +def getPhases(debug): + def log(function): + """Logger that records which phase processes each token""" + type_names = dict((value, key) for key, value in + tokenTypes.items()) + + def wrapped(self, *args, **kwargs): + if function.__name__.startswith("process") and len(args) > 0: + token = args[0] + try: + info = {"type": type_names[token['type']]} + except: + raise + if token['type'] in tagTokenTypes: + info["name"] = token['name'] + + self.parser.log.append((self.parser.tokenizer.state.__name__, + self.parser.phase.__class__.__name__, + self.__class__.__name__, + function.__name__, + info)) + return function(self, *args, **kwargs) + else: + return function(self, *args, **kwargs) + return wrapped + + def getMetaclass(use_metaclass, metaclass_func): + if use_metaclass: + return method_decorator_metaclass(metaclass_func) + else: + return type + + # pylint:disable=unused-argument + class Phase(with_metaclass(getMetaclass(debug, log))): + """Base class for helper object that implements each phase of processing + """ + + def __init__(self, parser, tree): + self.parser = parser + self.tree = tree + + def processEOF(self): + raise NotImplementedError + + def processComment(self, token): + # For most phases the following is correct. Where it's not it will be + # overridden. + self.tree.insertComment(token, self.tree.openElements[-1]) + + def processDoctype(self, token): + self.parser.parseError("unexpected-doctype") + + def processCharacters(self, token): + self.tree.insertText(token["data"]) + + def processSpaceCharacters(self, token): + self.tree.insertText(token["data"]) + + def processStartTag(self, token): + return self.startTagHandler[token["name"]](token) + + def startTagHtml(self, token): + if not self.parser.firstStartTag and token["name"] == "html": + self.parser.parseError("non-html-root") + # XXX Need a check here to see if the first start tag token emitted is + # this token... If it's not, invoke self.parser.parseError(). 
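+ # Illustrative example (not in the upstream file): a stray second
+ # <html lang="en"> start tag only contributes attributes that the root
+ # <html> element does not already carry; existing values are left untouched.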
+ for attr, value in token["data"].items(): + if attr not in self.tree.openElements[0].attributes: + self.tree.openElements[0].attributes[attr] = value + self.parser.firstStartTag = False + + def processEndTag(self, token): + return self.endTagHandler[token["name"]](token) + + class InitialPhase(Phase): + def processSpaceCharacters(self, token): + pass + + def processComment(self, token): + self.tree.insertComment(token, self.tree.document) + + def processDoctype(self, token): + name = token["name"] + publicId = token["publicId"] + systemId = token["systemId"] + correct = token["correct"] + + if (name != "html" or publicId is not None or + systemId is not None and systemId != "about:legacy-compat"): + self.parser.parseError("unknown-doctype") + + if publicId is None: + publicId = "" + + self.tree.insertDoctype(token) + + if publicId != "": + publicId = publicId.translate(asciiUpper2Lower) + + if (not correct or token["name"] != "html" or + publicId.startswith( + ("+//silmaril//dtd html pro v0r11 19970101//", + "-//advasoft ltd//dtd html 3.0 aswedit + extensions//", + "-//as//dtd html 3.0 aswedit + extensions//", + "-//ietf//dtd html 2.0 level 1//", + "-//ietf//dtd html 2.0 level 2//", + "-//ietf//dtd html 2.0 strict level 1//", + "-//ietf//dtd html 2.0 strict level 2//", + "-//ietf//dtd html 2.0 strict//", + "-//ietf//dtd html 2.0//", + "-//ietf//dtd html 2.1e//", + "-//ietf//dtd html 3.0//", + "-//ietf//dtd html 3.2 final//", + "-//ietf//dtd html 3.2//", + "-//ietf//dtd html 3//", + "-//ietf//dtd html level 0//", + "-//ietf//dtd html level 1//", + "-//ietf//dtd html level 2//", + "-//ietf//dtd html level 3//", + "-//ietf//dtd html strict level 0//", + "-//ietf//dtd html strict level 1//", + "-//ietf//dtd html strict level 2//", + "-//ietf//dtd html strict level 3//", + "-//ietf//dtd html strict//", + "-//ietf//dtd html//", + "-//metrius//dtd metrius presentational//", + "-//microsoft//dtd internet explorer 2.0 html strict//", + "-//microsoft//dtd internet explorer 2.0 html//", + "-//microsoft//dtd internet explorer 2.0 tables//", + "-//microsoft//dtd internet explorer 3.0 html strict//", + "-//microsoft//dtd internet explorer 3.0 html//", + "-//microsoft//dtd internet explorer 3.0 tables//", + "-//netscape comm. corp.//dtd html//", + "-//netscape comm. 
corp.//dtd strict html//", + "-//o'reilly and associates//dtd html 2.0//", + "-//o'reilly and associates//dtd html extended 1.0//", + "-//o'reilly and associates//dtd html extended relaxed 1.0//", + "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//", + "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//", + "-//spyglass//dtd html 2.0 extended//", + "-//sq//dtd html 2.0 hotmetal + extensions//", + "-//sun microsystems corp.//dtd hotjava html//", + "-//sun microsystems corp.//dtd hotjava strict html//", + "-//w3c//dtd html 3 1995-03-24//", + "-//w3c//dtd html 3.2 draft//", + "-//w3c//dtd html 3.2 final//", + "-//w3c//dtd html 3.2//", + "-//w3c//dtd html 3.2s draft//", + "-//w3c//dtd html 4.0 frameset//", + "-//w3c//dtd html 4.0 transitional//", + "-//w3c//dtd html experimental 19960712//", + "-//w3c//dtd html experimental 970421//", + "-//w3c//dtd w3 html//", + "-//w3o//dtd w3 html 3.0//", + "-//webtechs//dtd mozilla html 2.0//", + "-//webtechs//dtd mozilla html//")) or + publicId in ("-//w3o//dtd w3 html strict 3.0//en//", + "-/w3c/dtd html 4.0 transitional/en", + "html") or + publicId.startswith( + ("-//w3c//dtd html 4.01 frameset//", + "-//w3c//dtd html 4.01 transitional//")) and + systemId is None or + systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"): + self.parser.compatMode = "quirks" + elif (publicId.startswith( + ("-//w3c//dtd xhtml 1.0 frameset//", + "-//w3c//dtd xhtml 1.0 transitional//")) or + publicId.startswith( + ("-//w3c//dtd html 4.01 frameset//", + "-//w3c//dtd html 4.01 transitional//")) and + systemId is not None): + self.parser.compatMode = "limited quirks" + + self.parser.phase = self.parser.phases["beforeHtml"] + + def anythingElse(self): + self.parser.compatMode = "quirks" + self.parser.phase = self.parser.phases["beforeHtml"] + + def processCharacters(self, token): + self.parser.parseError("expected-doctype-but-got-chars") + self.anythingElse() + return token + + def processStartTag(self, token): + self.parser.parseError("expected-doctype-but-got-start-tag", + {"name": token["name"]}) + self.anythingElse() + return token + + def processEndTag(self, token): + self.parser.parseError("expected-doctype-but-got-end-tag", + {"name": token["name"]}) + self.anythingElse() + return token + + def processEOF(self): + self.parser.parseError("expected-doctype-but-got-eof") + self.anythingElse() + return True + + class BeforeHtmlPhase(Phase): + # helper methods + def insertHtmlElement(self): + self.tree.insertRoot(impliedTagToken("html", "StartTag")) + self.parser.phase = self.parser.phases["beforeHead"] + + # other + def processEOF(self): + self.insertHtmlElement() + return True + + def processComment(self, token): + self.tree.insertComment(token, self.tree.document) + + def processSpaceCharacters(self, token): + pass + + def processCharacters(self, token): + self.insertHtmlElement() + return token + + def processStartTag(self, token): + if token["name"] == "html": + self.parser.firstStartTag = True + self.insertHtmlElement() + return token + + def processEndTag(self, token): + if token["name"] not in ("head", "body", "html", "br"): + self.parser.parseError("unexpected-end-tag-before-html", + {"name": token["name"]}) + else: + self.insertHtmlElement() + return token + + class BeforeHeadPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("head", self.startTagHead) + ]) 
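+ # Dispatch sketch (illustrative, not in the upstream file): the
+ # MethodDispatcher acts like a mapping from tag names to bound handlers,
+ # so self.startTagHandler["head"] resolves to startTagHead, while any
+ # unlisted tag name falls back to the .default assigned on the next line.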
+ self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + (("head", "body", "html", "br"), self.endTagImplyHead) + ]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + self.startTagHead(impliedTagToken("head", "StartTag")) + return True + + def processSpaceCharacters(self, token): + pass + + def processCharacters(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagHead(self, token): + self.tree.insertElement(token) + self.tree.headPointer = self.tree.openElements[-1] + self.parser.phase = self.parser.phases["inHead"] + + def startTagOther(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def endTagImplyHead(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def endTagOther(self, token): + self.parser.parseError("end-tag-after-implied-root", + {"name": token["name"]}) + + class InHeadPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("title", self.startTagTitle), + (("noframes", "style"), self.startTagNoFramesStyle), + ("noscript", self.startTagNoscript), + ("script", self.startTagScript), + (("base", "basefont", "bgsound", "command", "link"), + self.startTagBaseLinkCommand), + ("meta", self.startTagMeta), + ("head", self.startTagHead) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("head", self.endTagHead), + (("br", "html", "body"), self.endTagHtmlBodyBr) + ]) + self.endTagHandler.default = self.endTagOther + + # the real thing + def processEOF(self): + self.anythingElse() + return True + + def processCharacters(self, token): + self.anythingElse() + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagHead(self, token): + self.parser.parseError("two-heads-are-not-better-than-one") + + def startTagBaseLinkCommand(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + def startTagMeta(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + attributes = token["data"] + if self.parser.tokenizer.stream.charEncoding[1] == "tentative": + if "charset" in attributes: + self.parser.tokenizer.stream.changeEncoding(attributes["charset"]) + elif ("content" in attributes and + "http-equiv" in attributes and + attributes["http-equiv"].lower() == "content-type"): + # Encoding it as UTF-8 here is a hack, as really we should pass + # the abstract Unicode string, and just use the + # ContentAttrParser on that, but using UTF-8 allows all chars + # to be encoded and as a ASCII-superset works. 
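+ # Illustrative example (not in the upstream file): given
+ #   <meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
+ # the ContentAttrParser below extracts "ISO-8859-1" from the content
+ # attribute so the tokenizer's stream encoding can be switched.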
+ data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8")) + parser = _inputstream.ContentAttrParser(data) + codec = parser.parse() + self.parser.tokenizer.stream.changeEncoding(codec) + + def startTagTitle(self, token): + self.parser.parseRCDataRawtext(token, "RCDATA") + + def startTagNoFramesStyle(self, token): + # Need to decide whether to implement the scripting-disabled case + self.parser.parseRCDataRawtext(token, "RAWTEXT") + + def startTagNoscript(self, token): + if self.parser.scripting: + self.parser.parseRCDataRawtext(token, "RAWTEXT") + else: + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inHeadNoscript"] + + def startTagScript(self, token): + self.tree.insertElement(token) + self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState + self.parser.originalPhase = self.parser.phase + self.parser.phase = self.parser.phases["text"] + + def startTagOther(self, token): + self.anythingElse() + return token + + def endTagHead(self, token): + node = self.parser.tree.openElements.pop() + assert node.name == "head", "Expected head got %s" % node.name + self.parser.phase = self.parser.phases["afterHead"] + + def endTagHtmlBodyBr(self, token): + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + self.endTagHead(impliedTagToken("head")) + + class InHeadNoscriptPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + (("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand), + (("head", "noscript"), self.startTagHeadNoscript), + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("noscript", self.endTagNoscript), + ("br", self.endTagBr), + ]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + self.parser.parseError("eof-in-head-noscript") + self.anythingElse() + return True + + def processComment(self, token): + return self.parser.phases["inHead"].processComment(token) + + def processCharacters(self, token): + self.parser.parseError("char-in-head-noscript") + self.anythingElse() + return token + + def processSpaceCharacters(self, token): + return self.parser.phases["inHead"].processSpaceCharacters(token) + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagBaseLinkCommand(self, token): + return self.parser.phases["inHead"].processStartTag(token) + + def startTagHeadNoscript(self, token): + self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) + + def startTagOther(self, token): + self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) + self.anythingElse() + return token + + def endTagNoscript(self, token): + node = self.parser.tree.openElements.pop() + assert node.name == "noscript", "Expected noscript got %s" % node.name + self.parser.phase = self.parser.phases["inHead"] + + def endTagBr(self, token): + self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + # Caller must raise parse error first! 
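+ # Illustrative example (not in the upstream file): the implied end tag below
+ # pops the open <noscript> element and returns the parser to the "inHead"
+ # phase, e.g. when a <div> start tag is seen inside <head><noscript>.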
+ self.endTagNoscript(impliedTagToken("noscript")) + + class AfterHeadPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("body", self.startTagBody), + ("frameset", self.startTagFrameset), + (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", + "style", "title"), + self.startTagFromHead), + ("head", self.startTagHead) + ]) + self.startTagHandler.default = self.startTagOther + self.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), + self.endTagHtmlBodyBr)]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + self.anythingElse() + return True + + def processCharacters(self, token): + self.anythingElse() + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagBody(self, token): + self.parser.framesetOK = False + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inBody"] + + def startTagFrameset(self, token): + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inFrameset"] + + def startTagFromHead(self, token): + self.parser.parseError("unexpected-start-tag-out-of-my-head", + {"name": token["name"]}) + self.tree.openElements.append(self.tree.headPointer) + self.parser.phases["inHead"].processStartTag(token) + for node in self.tree.openElements[::-1]: + if node.name == "head": + self.tree.openElements.remove(node) + break + + def startTagHead(self, token): + self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) + + def startTagOther(self, token): + self.anythingElse() + return token + + def endTagHtmlBodyBr(self, token): + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + self.tree.insertElement(impliedTagToken("body", "StartTag")) + self.parser.phase = self.parser.phases["inBody"] + self.parser.framesetOK = True + + class InBodyPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody + # the really-really-really-very crazy mode + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + # Set this to the default handler + self.processSpaceCharacters = self.processSpaceCharactersNonPre + + self.startTagHandler = _utils.MethodDispatcher([ + ("html", self.startTagHtml), + (("base", "basefont", "bgsound", "command", "link", "meta", + "script", "style", "title"), + self.startTagProcessInHead), + ("body", self.startTagBody), + ("frameset", self.startTagFrameset), + (("address", "article", "aside", "blockquote", "center", "details", + "dir", "div", "dl", "fieldset", "figcaption", "figure", + "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p", + "section", "summary", "ul"), + self.startTagCloseP), + (headingElements, self.startTagHeading), + (("pre", "listing"), self.startTagPreListing), + ("form", self.startTagForm), + (("li", "dd", "dt"), self.startTagListItem), + ("plaintext", self.startTagPlaintext), + ("a", self.startTagA), + (("b", "big", "code", "em", "font", "i", "s", "small", "strike", + "strong", "tt", "u"), self.startTagFormatting), + ("nobr", self.startTagNobr), + ("button", self.startTagButton), + (("applet", "marquee", "object"), self.startTagAppletMarqueeObject), + ("xmp", self.startTagXmp), + ("table", self.startTagTable), + (("area", "br", "embed", "img", "keygen", "wbr"), + 
self.startTagVoidFormatting), + (("param", "source", "track"), self.startTagParamSource), + ("input", self.startTagInput), + ("hr", self.startTagHr), + ("image", self.startTagImage), + ("isindex", self.startTagIsIndex), + ("textarea", self.startTagTextarea), + ("iframe", self.startTagIFrame), + ("noscript", self.startTagNoscript), + (("noembed", "noframes"), self.startTagRawtext), + ("select", self.startTagSelect), + (("rp", "rt"), self.startTagRpRt), + (("option", "optgroup"), self.startTagOpt), + (("math"), self.startTagMath), + (("svg"), self.startTagSvg), + (("caption", "col", "colgroup", "frame", "head", + "tbody", "td", "tfoot", "th", "thead", + "tr"), self.startTagMisplaced) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = _utils.MethodDispatcher([ + ("body", self.endTagBody), + ("html", self.endTagHtml), + (("address", "article", "aside", "blockquote", "button", "center", + "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure", + "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre", + "section", "summary", "ul"), self.endTagBlock), + ("form", self.endTagForm), + ("p", self.endTagP), + (("dd", "dt", "li"), self.endTagListItem), + (headingElements, self.endTagHeading), + (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small", + "strike", "strong", "tt", "u"), self.endTagFormatting), + (("applet", "marquee", "object"), self.endTagAppletMarqueeObject), + ("br", self.endTagBr), + ]) + self.endTagHandler.default = self.endTagOther + + def isMatchingFormattingElement(self, node1, node2): + return (node1.name == node2.name and + node1.namespace == node2.namespace and + node1.attributes == node2.attributes) + + # helper + def addFormattingElement(self, token): + self.tree.insertElement(token) + element = self.tree.openElements[-1] + + matchingElements = [] + for node in self.tree.activeFormattingElements[::-1]: + if node is Marker: + break + elif self.isMatchingFormattingElement(node, element): + matchingElements.append(node) + + assert len(matchingElements) <= 3 + if len(matchingElements) == 3: + self.tree.activeFormattingElements.remove(matchingElements[-1]) + self.tree.activeFormattingElements.append(element) + + # the real deal + def processEOF(self): + allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td", + "tfoot", "th", "thead", "tr", "body", + "html")) + for node in self.tree.openElements[::-1]: + if node.name not in allowed_elements: + self.parser.parseError("expected-closing-tag-but-got-eof") + break + # Stop parsing + + def processSpaceCharactersDropNewline(self, token): + # Sometimes (start of
<pre>, <listing>, and <textarea>
+
+<div class="explanation">
+  The debugger caught an exception in your WSGI application. You can now
+  look at the traceback which led to the error. <span class="nojavascript">
+  If you enable JavaScript you can also use additional features such as code
+  execution (if the evalex feature is enabled), automatic pasting of the
+  exceptions and much more.</span>
+</div>
+"""
+    + FOOTER
+    + """
+<!--
+
+%(plaintext_cs)s
+
+-->
+"""
+)
+
+CONSOLE_HTML = (
+    HEADER
+    + """\

+<h1>Interactive Console</h1>
+<div class="explanation">
+In this console you can execute Python expressions in the context of the +application. The initial namespace was created by the debugger automatically. +
+</div>
+<div class="console"><div class="inner">The Console requires JavaScript.</div></div>
+""" + + FOOTER +) + +SUMMARY_HTML = """\ +
+<div class="%(classes)s">
+  %(title)s
+  <ul>%(frames)s</ul>
+  %(description)s
+</div>
+""" + +FRAME_HTML = """\ +
+<div class="frame" id="frame-%(id)d">
+  <h4>File <cite class="filename">"%(filename)s"</cite>,
+      line <em class="line">%(lineno)s</em>,
+      in <code class="function">%(function_name)s</code></h4>
+  <div class="source %(library)s">%(lines)s</div>
+</div>
+""" + +SOURCE_LINE_HTML = """\ + + %(lineno)s + %(code)s + +""" + + +def render_console_html(secret: str, evalex_trusted: bool = True) -> str: + return CONSOLE_HTML % { + "evalex": "true", + "evalex_trusted": "true" if evalex_trusted else "false", + "console": "true", + "title": "Console", + "secret": secret, + "traceback_id": -1, + } + + +def get_current_traceback( + ignore_system_exceptions: bool = False, + show_hidden_frames: bool = False, + skip: int = 0, +) -> "Traceback": + """Get the current exception info as `Traceback` object. Per default + calling this method will reraise system exceptions such as generator exit, + system exit or others. This behavior can be disabled by passing `False` + to the function as first parameter. + """ + info = t.cast( + t.Tuple[t.Type[BaseException], BaseException, TracebackType], sys.exc_info() + ) + exc_type, exc_value, tb = info + + if ignore_system_exceptions and exc_type in { + SystemExit, + KeyboardInterrupt, + GeneratorExit, + }: + raise + for _ in range(skip): + if tb.tb_next is None: + break + tb = tb.tb_next + tb = Traceback(exc_type, exc_value, tb) + if not show_hidden_frames: + tb.filter_hidden_frames() + return tb + + +class Line: + """Helper for the source renderer.""" + + __slots__ = ("lineno", "code", "in_frame", "current") + + def __init__(self, lineno: int, code: str) -> None: + self.lineno = lineno + self.code = code + self.in_frame = False + self.current = False + + @property + def classes(self) -> t.List[str]: + rv = ["line"] + if self.in_frame: + rv.append("in-frame") + if self.current: + rv.append("current") + return rv + + def render(self) -> str: + return SOURCE_LINE_HTML % { + "classes": " ".join(self.classes), + "lineno": self.lineno, + "code": escape(self.code), + } + + +class Traceback: + """Wraps a traceback.""" + + def __init__( + self, + exc_type: t.Type[BaseException], + exc_value: BaseException, + tb: TracebackType, + ) -> None: + self.exc_type = exc_type + self.exc_value = exc_value + self.tb = tb + + exception_type = exc_type.__name__ + if exc_type.__module__ not in {"builtins", "__builtin__", "exceptions"}: + exception_type = f"{exc_type.__module__}.{exception_type}" + self.exception_type = exception_type + + self.groups = [] + memo = set() + while True: + self.groups.append(Group(exc_type, exc_value, tb)) + memo.add(id(exc_value)) + exc_value = exc_value.__cause__ or exc_value.__context__ # type: ignore + if exc_value is None or id(exc_value) in memo: + break + exc_type = type(exc_value) + tb = exc_value.__traceback__ # type: ignore + self.groups.reverse() + self.frames = [frame for group in self.groups for frame in group.frames] + + def filter_hidden_frames(self) -> None: + """Remove the frames according to the paste spec.""" + for group in self.groups: + group.filter_hidden_frames() + + self.frames[:] = [frame for group in self.groups for frame in group.frames] + + @property + def is_syntax_error(self) -> bool: + """Is it a syntax error?""" + return isinstance(self.exc_value, SyntaxError) + + @property + def exception(self) -> str: + """String representation of the final exception.""" + return self.groups[-1].exception + + def log(self, logfile: t.Optional[t.IO[str]] = None) -> None: + """Log the ASCII traceback into a file object.""" + if logfile is None: + logfile = sys.stderr + tb = f"{self.plaintext.rstrip()}\n" + logfile.write(tb) + + def render_summary(self, include_title: bool = True) -> str: + """Render the traceback for the interactive console.""" + title = "" + classes = ["traceback"] + if not 
self.frames: + classes.append("noframe-traceback") + frames = [] + else: + library_frames = sum(frame.is_library for frame in self.frames) + mark_lib = 0 < library_frames < len(self.frames) + frames = [group.render(mark_lib=mark_lib) for group in self.groups] + + if include_title: + if self.is_syntax_error: + title = "Syntax Error" + else: + title = "Traceback (most recent call last):" + + if self.is_syntax_error: + description = f"<pre class=syntaxerror>{escape(self.exception)}</pre>" + else: + description = f"<blockquote>{escape(self.exception)}</blockquote>" + + return SUMMARY_HTML % { + "classes": " ".join(classes), + "title": f"<h3>{title if title else ''}</h3>
", + "frames": "\n".join(frames), + "description": description, + } + + def render_full( + self, + evalex: bool = False, + secret: t.Optional[str] = None, + evalex_trusted: bool = True, + ) -> str: + """Render the Full HTML page with the traceback info.""" + exc = escape(self.exception) + return PAGE_HTML % { + "evalex": "true" if evalex else "false", + "evalex_trusted": "true" if evalex_trusted else "false", + "console": "false", + "title": exc, + "exception": exc, + "exception_type": escape(self.exception_type), + "summary": self.render_summary(include_title=False), + "plaintext": escape(self.plaintext), + "plaintext_cs": re.sub("-{2,}", "-", self.plaintext), + "traceback_id": self.id, + "secret": secret, + } + + @cached_property + def plaintext(self) -> str: + return "\n".join([group.render_text() for group in self.groups]) + + @property + def id(self) -> int: + return id(self) + + +class Group: + """A group of frames for an exception in a traceback. If the + exception has a ``__cause__`` or ``__context__``, there are multiple + exception groups. + """ + + def __init__( + self, + exc_type: t.Type[BaseException], + exc_value: BaseException, + tb: TracebackType, + ) -> None: + self.exc_type = exc_type + self.exc_value = exc_value + self.info = None + if exc_value.__cause__ is not None: + self.info = ( + "The above exception was the direct cause of the following exception" + ) + elif exc_value.__context__ is not None: + self.info = ( + "During handling of the above exception, another exception occurred" + ) + + self.frames = [] + while tb is not None: + self.frames.append(Frame(exc_type, exc_value, tb)) + tb = tb.tb_next # type: ignore + + def filter_hidden_frames(self) -> None: + # An exception may not have a traceback to filter frames, such + # as one re-raised from ProcessPoolExecutor. + if not self.frames: + return + + new_frames: t.List[Frame] = [] + hidden = False + + for frame in self.frames: + hide = frame.hide + if hide in ("before", "before_and_this"): + new_frames = [] + hidden = False + if hide == "before_and_this": + continue + elif hide in ("reset", "reset_and_this"): + hidden = False + if hide == "reset_and_this": + continue + elif hide in ("after", "after_and_this"): + hidden = True + if hide == "after_and_this": + continue + elif hide or hidden: + continue + new_frames.append(frame) + + # if we only have one frame and that frame is from the codeop + # module, remove it. + if len(new_frames) == 1 and self.frames[0].module == "codeop": + del self.frames[:] + + # if the last frame is missing something went terrible wrong :( + elif self.frames[-1] in new_frames: + self.frames[:] = new_frames + + @property + def exception(self) -> str: + """String representation of the exception.""" + buf = traceback.format_exception_only(self.exc_type, self.exc_value) + rv = "".join(buf).strip() + return _to_str(rv, "utf-8", "replace") + + def render(self, mark_lib: bool = True) -> str: + out = [] + if self.info is not None: + out.append(f'
  • {self.info}:
    ') + for frame in self.frames: + title = f' title="{escape(frame.info)}"' if frame.info else "" + out.append(f"{frame.render(mark_lib=mark_lib)}") + return "\n".join(out) + + def render_text(self) -> str: + out = [] + if self.info is not None: + out.append(f"\n{self.info}:\n") + out.append("Traceback (most recent call last):") + for frame in self.frames: + out.append(frame.render_text()) + out.append(self.exception) + return "\n".join(out) + + +class Frame: + """A single frame in a traceback.""" + + def __init__( + self, + exc_type: t.Type[BaseException], + exc_value: BaseException, + tb: TracebackType, + ) -> None: + self.lineno = tb.tb_lineno + self.function_name = tb.tb_frame.f_code.co_name + self.locals = tb.tb_frame.f_locals + self.globals = tb.tb_frame.f_globals + + fn = inspect.getsourcefile(tb) or inspect.getfile(tb) + if fn[-4:] in (".pyo", ".pyc"): + fn = fn[:-1] + # if it's a file on the file system resolve the real filename. + if os.path.isfile(fn): + fn = os.path.realpath(fn) + self.filename = _to_str(fn, get_filesystem_encoding()) + self.module = self.globals.get("__name__", self.locals.get("__name__")) + self.loader = self.globals.get("__loader__", self.locals.get("__loader__")) + self.code = tb.tb_frame.f_code + + # support for paste's traceback extensions + self.hide = self.locals.get("__traceback_hide__", False) + info = self.locals.get("__traceback_info__") + if info is not None: + info = _to_str(info, "utf-8", "replace") + self.info = info + + def render(self, mark_lib: bool = True) -> str: + """Render a single frame in a traceback.""" + return FRAME_HTML % { + "id": self.id, + "filename": escape(self.filename), + "lineno": self.lineno, + "function_name": escape(self.function_name), + "lines": self.render_line_context(), + "library": "library" if mark_lib and self.is_library else "", + } + + @cached_property + def is_library(self) -> bool: + return any( + self.filename.startswith(os.path.realpath(path)) + for path in sysconfig.get_paths().values() + ) + + def render_text(self) -> str: + return ( + f' File "{self.filename}", line {self.lineno}, in {self.function_name}\n' + f" {self.current_line.strip()}" + ) + + def render_line_context(self) -> str: + before, current, after = self.get_context_lines() + rv = [] + + def render_line(line: str, cls: str) -> None: + line = line.expandtabs().rstrip() + stripped_line = line.strip() + prefix = len(line) - len(stripped_line) + rv.append( + f'
    {" " * prefix}'
    +                f"{escape(stripped_line) if stripped_line else ' '}
    " + ) + + for line in before: + render_line(line, "before") + render_line(current, "current") + for line in after: + render_line(line, "after") + + return "\n".join(rv) + + def get_annotated_lines(self) -> t.List[Line]: + """Helper function that returns lines with extra information.""" + lines = [Line(idx + 1, x) for idx, x in enumerate(self.sourcelines)] + + # find function definition and mark lines + if hasattr(self.code, "co_firstlineno"): + lineno = self.code.co_firstlineno - 1 + while lineno > 0: + if _funcdef_re.match(lines[lineno].code): + break + lineno -= 1 + try: + offset = len(inspect.getblock([f"{x.code}\n" for x in lines[lineno:]])) + except TokenError: + offset = 0 + for line in lines[lineno : lineno + offset]: + line.in_frame = True + + # mark current line + try: + lines[self.lineno - 1].current = True + except IndexError: + pass + + return lines + + def eval(self, code: t.Union[str, CodeType], mode: str = "single") -> t.Any: + """Evaluate code in the context of the frame.""" + if isinstance(code, str): + code = compile(code, "", mode) + return eval(code, self.globals, self.locals) + + @cached_property + def sourcelines(self) -> t.List[str]: + """The sourcecode of the file as list of strings.""" + # get sourcecode from loader or file + source = None + if self.loader is not None: + try: + if hasattr(self.loader, "get_source"): + source = self.loader.get_source(self.module) + elif hasattr(self.loader, "get_source_by_code"): + source = self.loader.get_source_by_code(self.code) + except Exception: + # we munch the exception so that we don't cause troubles + # if the loader is broken. + pass + + if source is None: + try: + with open(self.filename, mode="rb") as f: + source = f.read() + except OSError: + return [] + + # already str? return right away + if isinstance(source, str): + return source.splitlines() + + charset = "utf-8" + if source.startswith(codecs.BOM_UTF8): + source = source[3:] + else: + for idx, match in enumerate(_line_re.finditer(source)): + coding_match = _coding_re.search(match.group()) + if coding_match is not None: + charset = coding_match.group(1).decode("utf-8") + break + if idx > 1: + break + + # on broken cookies we fall back to utf-8 too + charset = _to_str(charset) + try: + codecs.lookup(charset) + except LookupError: + charset = "utf-8" + + return source.decode(charset, "replace").splitlines() + + def get_context_lines( + self, context: int = 5 + ) -> t.Tuple[t.List[str], str, t.List[str]]: + before = self.sourcelines[self.lineno - context - 1 : self.lineno - 1] + past = self.sourcelines[self.lineno : self.lineno + context] + return (before, self.current_line, past) + + @property + def current_line(self) -> str: + try: + return self.sourcelines[self.lineno - 1] + except IndexError: + return "" + + @cached_property + def console(self) -> Console: + return Console(self.globals, self.locals) + + @property + def id(self) -> int: + return id(self) diff --git a/venv/lib/python3.8/site-packages/werkzeug/exceptions.py b/venv/lib/python3.8/site-packages/werkzeug/exceptions.py new file mode 100644 index 0000000..16c3964 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/exceptions.py @@ -0,0 +1,943 @@ +"""Implements a number of Python exceptions which can be raised from within +a view to trigger a standard HTTP non-200 response. + +Usage Example +------------- + +.. 
code-block:: python + + from werkzeug.wrappers.request import Request + from werkzeug.exceptions import HTTPException, NotFound + + def view(request): + raise NotFound() + + @Request.application + def application(request): + try: + return view(request) + except HTTPException as e: + return e + +As you can see from this example those exceptions are callable WSGI +applications. However, they are not Werkzeug response objects. You +can get a response object by calling ``get_response()`` on a HTTP +exception. + +Keep in mind that you may have to pass an environ (WSGI) or scope +(ASGI) to ``get_response()`` because some errors fetch additional +information relating to the request. + +If you want to hook in a different exception page to say, a 404 status +code, you can add a second except for a specific subclass of an error: + +.. code-block:: python + + @Request.application + def application(request): + try: + return view(request) + except NotFound as e: + return not_found(request) + except HTTPException as e: + return e + +""" +import sys +import typing as t +import warnings +from datetime import datetime +from html import escape + +from ._internal import _get_environ + +if t.TYPE_CHECKING: + import typing_extensions as te + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIEnvironment + from .datastructures import WWWAuthenticate + from .sansio.response import Response + from .wrappers.response import Response as WSGIResponse # noqa: F401 + + +class HTTPException(Exception): + """The base class for all HTTP exceptions. This exception can be called as a WSGI + application to render a default error page or you can catch the subclasses + of it independently and render nicer error messages. + """ + + code: t.Optional[int] = None + description: t.Optional[str] = None + + def __init__( + self, + description: t.Optional[str] = None, + response: t.Optional["Response"] = None, + ) -> None: + super().__init__() + if description is not None: + self.description = description + self.response = response + + @classmethod + def wrap( + cls, exception: t.Type[BaseException], name: t.Optional[str] = None + ) -> t.Type["HTTPException"]: + """Create an exception that is a subclass of the calling HTTP + exception and the ``exception`` argument. + + The first argument to the class will be passed to the + wrapped ``exception``, the rest to the HTTP exception. If + ``e.args`` is not empty and ``e.show_exception`` is ``True``, + the wrapped exception message is added to the HTTP error + description. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Create a subclass manually + instead. + + .. versionchanged:: 0.15.5 + The ``show_exception`` attribute controls whether the + description includes the wrapped exception message. + + .. versionchanged:: 0.15.0 + The description includes the wrapped exception message. + """ + warnings.warn( + "'HTTPException.wrap' is deprecated and will be removed in" + " Werkzeug 2.1. 
Create a subclass manually instead.", + DeprecationWarning, + stacklevel=2, + ) + + class newcls(cls, exception): # type: ignore + _description = cls.description + show_exception = False + + def __init__( + self, arg: t.Optional[t.Any] = None, *args: t.Any, **kwargs: t.Any + ) -> None: + super().__init__(*args, **kwargs) + + if arg is None: + exception.__init__(self) + else: + exception.__init__(self, arg) + + @property + def description(self) -> str: + if self.show_exception: + return ( + f"{self._description}\n" + f"{exception.__name__}: {exception.__str__(self)}" + ) + + return self._description # type: ignore + + @description.setter + def description(self, value: str) -> None: + self._description = value + + newcls.__module__ = sys._getframe(1).f_globals["__name__"] + name = name or cls.__name__ + exception.__name__ + newcls.__name__ = newcls.__qualname__ = name + return newcls + + @property + def name(self) -> str: + """The status name.""" + from .http import HTTP_STATUS_CODES + + return HTTP_STATUS_CODES.get(self.code, "Unknown Error") # type: ignore + + def get_description( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> str: + """Get the description.""" + if self.description is None: + description = "" + elif not isinstance(self.description, str): + description = str(self.description) + else: + description = self.description + + description = escape(description).replace("\n", "
    ") + return f"

    {description}

    " + + def get_body( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> str: + """Get the HTML body.""" + return ( + '\n' + f"{self.code} {escape(self.name)}\n" + f"

    {escape(self.name)}

    \n" + f"{self.get_description(environ)}\n" + ) + + def get_headers( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> t.List[t.Tuple[str, str]]: + """Get a list of headers.""" + return [("Content-Type", "text/html; charset=utf-8")] + + def get_response( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> "Response": + """Get a response object. If one was passed to the exception + it's returned directly. + + :param environ: the optional environ for the request. This + can be used to modify the response depending + on how the request looked like. + :return: a :class:`Response` object or a subclass thereof. + """ + from .wrappers.response import Response as WSGIResponse # noqa: F811 + + if self.response is not None: + return self.response + if environ is not None: + environ = _get_environ(environ) + headers = self.get_headers(environ, scope) + return WSGIResponse(self.get_body(environ, scope), self.code, headers) + + def __call__( + self, environ: "WSGIEnvironment", start_response: "StartResponse" + ) -> t.Iterable[bytes]: + """Call the exception as WSGI application. + + :param environ: the WSGI environment. + :param start_response: the response callable provided by the WSGI + server. + """ + response = t.cast("WSGIResponse", self.get_response(environ)) + return response(environ, start_response) + + def __str__(self) -> str: + code = self.code if self.code is not None else "???" + return f"{code} {self.name}: {self.description}" + + def __repr__(self) -> str: + code = self.code if self.code is not None else "???" + return f"<{type(self).__name__} '{code}: {self.name}'>" + + +class BadRequest(HTTPException): + """*400* `Bad Request` + + Raise if the browser sends something to the application the application + or server cannot handle. + """ + + code = 400 + description = ( + "The browser (or proxy) sent a request that this server could " + "not understand." + ) + + +class BadRequestKeyError(BadRequest, KeyError): + """An exception that is used to signal both a :exc:`KeyError` and a + :exc:`BadRequest`. Used by many of the datastructures. + """ + + _description = BadRequest.description + #: Show the KeyError along with the HTTP error message in the + #: response. This should be disabled in production, but can be + #: useful in a debug mode. + show_exception = False + + def __init__(self, arg: t.Optional[str] = None, *args: t.Any, **kwargs: t.Any): + super().__init__(*args, **kwargs) + + if arg is None: + KeyError.__init__(self) + else: + KeyError.__init__(self, arg) + + @property # type: ignore + def description(self) -> str: # type: ignore + if self.show_exception: + return ( + f"{self._description}\n" + f"{KeyError.__name__}: {KeyError.__str__(self)}" + ) + + return self._description + + @description.setter + def description(self, value: str) -> None: + self._description = value + + +class ClientDisconnected(BadRequest): + """Internal exception that is raised if Werkzeug detects a disconnected + client. Since the client is already gone at that point attempting to + send the error message to the client might not work and might ultimately + result in another exception in the server. Mainly this is here so that + it is silenced by default as far as Werkzeug is concerned. + + Since disconnections cannot be reliably detected and are unspecified + by WSGI to a large extent this might or might not be raised if a client + is gone. + + .. 
versionadded:: 0.8 + """ + + +class SecurityError(BadRequest): + """Raised if something triggers a security error. This is otherwise + exactly like a bad request error. + + .. versionadded:: 0.9 + """ + + +class BadHost(BadRequest): + """Raised if the submitted host is badly formatted. + + .. versionadded:: 0.11.2 + """ + + +class Unauthorized(HTTPException): + """*401* ``Unauthorized`` + + Raise if the user is not authorized to access a resource. + + The ``www_authenticate`` argument should be used to set the + ``WWW-Authenticate`` header. This is used for HTTP basic auth and + other schemes. Use :class:`~werkzeug.datastructures.WWWAuthenticate` + to create correctly formatted values. Strictly speaking a 401 + response is invalid if it doesn't provide at least one value for + this header, although real clients typically don't care. + + :param description: Override the default message used for the body + of the response. + :param www-authenticate: A single value, or list of values, for the + WWW-Authenticate header(s). + + .. versionchanged:: 2.0 + Serialize multiple ``www_authenticate`` items into multiple + ``WWW-Authenticate`` headers, rather than joining them + into a single value, for better interoperability. + + .. versionchanged:: 0.15.3 + If the ``www_authenticate`` argument is not set, the + ``WWW-Authenticate`` header is not set. + + .. versionchanged:: 0.15.3 + The ``response`` argument was restored. + + .. versionchanged:: 0.15.1 + ``description`` was moved back as the first argument, restoring + its previous position. + + .. versionchanged:: 0.15.0 + ``www_authenticate`` was added as the first argument, ahead of + ``description``. + """ + + code = 401 + description = ( + "The server could not verify that you are authorized to access" + " the URL requested. You either supplied the wrong credentials" + " (e.g. a bad password), or your browser doesn't understand" + " how to supply the credentials required." + ) + + def __init__( + self, + description: t.Optional[str] = None, + response: t.Optional["Response"] = None, + www_authenticate: t.Optional[ + t.Union["WWWAuthenticate", t.Iterable["WWWAuthenticate"]] + ] = None, + ) -> None: + super().__init__(description, response) + + from .datastructures import WWWAuthenticate + + if isinstance(www_authenticate, WWWAuthenticate): + www_authenticate = (www_authenticate,) + + self.www_authenticate = www_authenticate + + def get_headers( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> t.List[t.Tuple[str, str]]: + headers = super().get_headers(environ, scope) + if self.www_authenticate: + headers.extend(("WWW-Authenticate", str(x)) for x in self.www_authenticate) + return headers + + +class Forbidden(HTTPException): + """*403* `Forbidden` + + Raise if the user doesn't have the permission for the requested resource + but was authenticated. + """ + + code = 403 + description = ( + "You don't have the permission to access the requested" + " resource. It is either read-protected or not readable by the" + " server." + ) + + +class NotFound(HTTPException): + """*404* `Not Found` + + Raise if a resource does not exist and never existed. + """ + + code = 404 + description = ( + "The requested URL was not found on the server. If you entered" + " the URL manually please check your spelling and try again." + ) + + +class MethodNotAllowed(HTTPException): + """*405* `Method Not Allowed` + + Raise if the server used a method the resource does not handle. For + example `POST` if the resource is view only. 
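# Usage sketch for the Unauthorized exception above: attaching a challenge so
# get_headers() emits WWW-Authenticate next to Content-Type. Assumes the usual
# WWWAuthenticate.set_basic() helper (not shown in this diff); the realm is illustrative.
from werkzeug.datastructures import WWWAuthenticate
from werkzeug.exceptions import Unauthorized

challenge = WWWAuthenticate()
challenge.set_basic(realm="example")
exc = Unauthorized(www_authenticate=challenge)
print(exc.get_headers())  # Content-Type plus one WWW-Authenticate entry per challenge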
Especially useful for REST. + + The first argument for this exception should be a list of allowed methods. + Strictly speaking the response would be invalid if you don't provide valid + methods in the header which you can do with that list. + """ + + code = 405 + description = "The method is not allowed for the requested URL." + + def __init__( + self, + valid_methods: t.Optional[t.Iterable[str]] = None, + description: t.Optional[str] = None, + response: t.Optional["Response"] = None, + ) -> None: + """Takes an optional list of valid http methods + starting with werkzeug 0.3 the list will be mandatory.""" + super().__init__(description=description, response=response) + self.valid_methods = valid_methods + + def get_headers( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> t.List[t.Tuple[str, str]]: + headers = super().get_headers(environ, scope) + if self.valid_methods: + headers.append(("Allow", ", ".join(self.valid_methods))) + return headers + + +class NotAcceptable(HTTPException): + """*406* `Not Acceptable` + + Raise if the server can't return any content conforming to the + `Accept` headers of the client. + """ + + code = 406 + description = ( + "The resource identified by the request is only capable of" + " generating response entities which have content" + " characteristics not acceptable according to the accept" + " headers sent in the request." + ) + + +class RequestTimeout(HTTPException): + """*408* `Request Timeout` + + Raise to signalize a timeout. + """ + + code = 408 + description = ( + "The server closed the network connection because the browser" + " didn't finish the request within the specified time." + ) + + +class Conflict(HTTPException): + """*409* `Conflict` + + Raise to signal that a request cannot be completed because it conflicts + with the current state on the server. + + .. versionadded:: 0.7 + """ + + code = 409 + description = ( + "A conflict happened while processing the request. The" + " resource might have been modified while the request was being" + " processed." + ) + + +class Gone(HTTPException): + """*410* `Gone` + + Raise if a resource existed previously and went away without new location. + """ + + code = 410 + description = ( + "The requested URL is no longer available on this server and" + " there is no forwarding address. If you followed a link from a" + " foreign page, please contact the author of this page." + ) + + +class LengthRequired(HTTPException): + """*411* `Length Required` + + Raise if the browser submitted data but no ``Content-Length`` header which + is required for the kind of processing the server does. + """ + + code = 411 + description = ( + "A request with this method requires a valid Content-" + "Length header." + ) + + +class PreconditionFailed(HTTPException): + """*412* `Precondition Failed` + + Status code used in combination with ``If-Match``, ``If-None-Match``, or + ``If-Unmodified-Since``. + """ + + code = 412 + description = ( + "The precondition on the request for the URL failed positive evaluation." + ) + + +class RequestEntityTooLarge(HTTPException): + """*413* `Request Entity Too Large` + + The status code one should return if the data submitted exceeded a given + limit. + """ + + code = 413 + description = "The data value transmitted exceeds the capacity limit." + + +class RequestURITooLarge(HTTPException): + """*414* `Request URI Too Large` + + Like *413* but for too long URLs. 
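# Usage sketch for MethodNotAllowed above: passing valid_methods populates the
# Allow header of the rendered response (the method names are illustrative).
from werkzeug.exceptions import MethodNotAllowed

exc = MethodNotAllowed(valid_methods=["GET", "HEAD"])
print(exc.get_headers())          # includes ("Allow", "GET, HEAD")
print(exc.get_response().status)  # "405 METHOD NOT ALLOWED"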
+ """ + + code = 414 + description = ( + "The length of the requested URL exceeds the capacity limit for" + " this server. The request cannot be processed." + ) + + +class UnsupportedMediaType(HTTPException): + """*415* `Unsupported Media Type` + + The status code returned if the server is unable to handle the media type + the client transmitted. + """ + + code = 415 + description = ( + "The server does not support the media type transmitted in the request." + ) + + +class RequestedRangeNotSatisfiable(HTTPException): + """*416* `Requested Range Not Satisfiable` + + The client asked for an invalid part of the file. + + .. versionadded:: 0.7 + """ + + code = 416 + description = "The server cannot provide the requested range." + + def __init__( + self, + length: t.Optional[int] = None, + units: str = "bytes", + description: t.Optional[str] = None, + response: t.Optional["Response"] = None, + ) -> None: + """Takes an optional `Content-Range` header value based on ``length`` + parameter. + """ + super().__init__(description=description, response=response) + self.length = length + self.units = units + + def get_headers( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> t.List[t.Tuple[str, str]]: + headers = super().get_headers(environ, scope) + if self.length is not None: + headers.append(("Content-Range", f"{self.units} */{self.length}")) + return headers + + +class ExpectationFailed(HTTPException): + """*417* `Expectation Failed` + + The server cannot meet the requirements of the Expect request-header. + + .. versionadded:: 0.7 + """ + + code = 417 + description = "The server could not meet the requirements of the Expect header" + + +class ImATeapot(HTTPException): + """*418* `I'm a teapot` + + The server should return this if it is a teapot and someone attempted + to brew coffee with it. + + .. versionadded:: 0.7 + """ + + code = 418 + description = "This server is a teapot, not a coffee machine" + + +class UnprocessableEntity(HTTPException): + """*422* `Unprocessable Entity` + + Used if the request is well formed, but the instructions are otherwise + incorrect. + """ + + code = 422 + description = ( + "The request was well-formed but was unable to be followed due" + " to semantic errors." + ) + + +class Locked(HTTPException): + """*423* `Locked` + + Used if the resource that is being accessed is locked. + """ + + code = 423 + description = "The resource that is being accessed is locked." + + +class FailedDependency(HTTPException): + """*424* `Failed Dependency` + + Used if the method could not be performed on the resource + because the requested action depended on another action and that action failed. + """ + + code = 424 + description = ( + "The method could not be performed on the resource because the" + " requested action depended on another action and that action" + " failed." + ) + + +class PreconditionRequired(HTTPException): + """*428* `Precondition Required` + + The server requires this request to be conditional, typically to prevent + the lost update problem, which is a race condition between two or more + clients attempting to update a resource through PUT or DELETE. By requiring + each client to include a conditional header ("If-Match" or "If-Unmodified- + Since") with the proper value retained from a recent GET request, the + server ensures that each client has at least seen the previous revision of + the resource. 
+ """ + + code = 428 + description = ( + "This request is required to be conditional; try using" + ' "If-Match" or "If-Unmodified-Since".' + ) + + +class _RetryAfter(HTTPException): + """Adds an optional ``retry_after`` parameter which will set the + ``Retry-After`` header. May be an :class:`int` number of seconds or + a :class:`~datetime.datetime`. + """ + + def __init__( + self, + description: t.Optional[str] = None, + response: t.Optional["Response"] = None, + retry_after: t.Optional[t.Union[datetime, int]] = None, + ) -> None: + super().__init__(description, response) + self.retry_after = retry_after + + def get_headers( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> t.List[t.Tuple[str, str]]: + headers = super().get_headers(environ, scope) + + if self.retry_after: + if isinstance(self.retry_after, datetime): + from .http import http_date + + value = http_date(self.retry_after) + else: + value = str(self.retry_after) + + headers.append(("Retry-After", value)) + + return headers + + +class TooManyRequests(_RetryAfter): + """*429* `Too Many Requests` + + The server is limiting the rate at which this user receives + responses, and this request exceeds that rate. (The server may use + any convenient method to identify users and their request rates). + The server may include a "Retry-After" header to indicate how long + the user should wait before retrying. + + :param retry_after: If given, set the ``Retry-After`` header to this + value. May be an :class:`int` number of seconds or a + :class:`~datetime.datetime`. + + .. versionchanged:: 1.0 + Added ``retry_after`` parameter. + """ + + code = 429 + description = "This user has exceeded an allotted request count. Try again later." + + +class RequestHeaderFieldsTooLarge(HTTPException): + """*431* `Request Header Fields Too Large` + + The server refuses to process the request because the header fields are too + large. One or more individual fields may be too large, or the set of all + headers is too large. + """ + + code = 431 + description = "One or more header fields exceeds the maximum size." + + +class UnavailableForLegalReasons(HTTPException): + """*451* `Unavailable For Legal Reasons` + + This status code indicates that the server is denying access to the + resource as a consequence of a legal demand. + """ + + code = 451 + description = "Unavailable for legal reasons." + + +class InternalServerError(HTTPException): + """*500* `Internal Server Error` + + Raise if an internal server error occurred. This is a good fallback if an + unknown error occurred in the dispatcher. + + .. versionchanged:: 1.0.0 + Added the :attr:`original_exception` attribute. + """ + + code = 500 + description = ( + "The server encountered an internal error and was unable to" + " complete your request. Either the server is overloaded or" + " there is an error in the application." + ) + + def __init__( + self, + description: t.Optional[str] = None, + response: t.Optional["Response"] = None, + original_exception: t.Optional[BaseException] = None, + ) -> None: + #: The original exception that caused this 500 error. Can be + #: used by frameworks to provide context when handling + #: unexpected errors. + self.original_exception = original_exception + super().__init__(description=description, response=response) + + +class NotImplemented(HTTPException): + """*501* `Not Implemented` + + Raise if the application does not support the action requested by the + browser. 
+ """ + + code = 501 + description = "The server does not support the action requested by the browser." + + +class BadGateway(HTTPException): + """*502* `Bad Gateway` + + If you do proxying in your application you should return this status code + if you received an invalid response from the upstream server it accessed + in attempting to fulfill the request. + """ + + code = 502 + description = ( + "The proxy server received an invalid response from an upstream server." + ) + + +class ServiceUnavailable(_RetryAfter): + """*503* `Service Unavailable` + + Status code you should return if a service is temporarily + unavailable. + + :param retry_after: If given, set the ``Retry-After`` header to this + value. May be an :class:`int` number of seconds or a + :class:`~datetime.datetime`. + + .. versionchanged:: 1.0 + Added ``retry_after`` parameter. + """ + + code = 503 + description = ( + "The server is temporarily unable to service your request due" + " to maintenance downtime or capacity problems. Please try" + " again later." + ) + + +class GatewayTimeout(HTTPException): + """*504* `Gateway Timeout` + + Status code you should return if a connection to an upstream server + times out. + """ + + code = 504 + description = "The connection to an upstream server timed out." + + +class HTTPVersionNotSupported(HTTPException): + """*505* `HTTP Version Not Supported` + + The server does not support the HTTP protocol version used in the request. + """ + + code = 505 + description = ( + "The server does not support the HTTP protocol version used in the request." + ) + + +default_exceptions: t.Dict[int, t.Type[HTTPException]] = {} + + +def _find_exceptions() -> None: + for obj in globals().values(): + try: + is_http_exception = issubclass(obj, HTTPException) + except TypeError: + is_http_exception = False + if not is_http_exception or obj.code is None: + continue + old_obj = default_exceptions.get(obj.code, None) + if old_obj is not None and issubclass(obj, old_obj): + continue + default_exceptions[obj.code] = obj + + +_find_exceptions() +del _find_exceptions + + +class Aborter: + """When passed a dict of code -> exception items it can be used as + callable that raises exceptions. If the first argument to the + callable is an integer it will be looked up in the mapping, if it's + a WSGI application it will be raised in a proxy exception. + + The rest of the arguments are forwarded to the exception constructor. + """ + + def __init__( + self, + mapping: t.Optional[t.Dict[int, t.Type[HTTPException]]] = None, + extra: t.Optional[t.Dict[int, t.Type[HTTPException]]] = None, + ) -> None: + if mapping is None: + mapping = default_exceptions + self.mapping = dict(mapping) + if extra is not None: + self.mapping.update(extra) + + def __call__( + self, code: t.Union[int, "Response"], *args: t.Any, **kwargs: t.Any + ) -> "te.NoReturn": + from .sansio.response import Response + + if isinstance(code, Response): + raise HTTPException(response=code) + + if code not in self.mapping: + raise LookupError(f"no exception for {code!r}") + + raise self.mapping[code](*args, **kwargs) + + +def abort( + status: t.Union[int, "Response"], *args: t.Any, **kwargs: t.Any +) -> "te.NoReturn": + """Raises an :py:exc:`HTTPException` for the given status code or WSGI + application. + + If a status code is given, it will be looked up in the list of + exceptions and will raise that exception. 
If passed a WSGI application, + it will wrap it in a proxy WSGI exception and raise that:: + + abort(404) # 404 Not Found + abort(Response('Hello World')) + + """ + _aborter(status, *args, **kwargs) + + +_aborter: Aborter = Aborter() diff --git a/venv/lib/python3.8/site-packages/werkzeug/filesystem.py b/venv/lib/python3.8/site-packages/werkzeug/filesystem.py new file mode 100644 index 0000000..36a3d12 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/filesystem.py @@ -0,0 +1,55 @@ +import codecs +import sys +import typing as t +import warnings + +# We do not trust traditional unixes. +has_likely_buggy_unicode_filesystem = ( + sys.platform.startswith("linux") or "bsd" in sys.platform +) + + +def _is_ascii_encoding(encoding: t.Optional[str]) -> bool: + """Given an encoding this figures out if the encoding is actually ASCII (which + is something we don't actually want in most cases). This is necessary + because ASCII comes under many names such as ANSI_X3.4-1968. + """ + if encoding is None: + return False + try: + return codecs.lookup(encoding).name == "ascii" + except LookupError: + return False + + +class BrokenFilesystemWarning(RuntimeWarning, UnicodeWarning): + """The warning used by Werkzeug to signal a broken filesystem. Will only be + used once per runtime.""" + + +_warned_about_filesystem_encoding = False + + +def get_filesystem_encoding() -> str: + """Returns the filesystem encoding that should be used. Note that this is + different from the Python understanding of the filesystem encoding which + might be deeply flawed. Do not use this value against Python's string APIs + because it might be different. See :ref:`filesystem-encoding` for the exact + behavior. + + The concept of a filesystem encoding in generally is not something you + should rely on. As such if you ever need to use this function except for + writing wrapper code reconsider. + """ + global _warned_about_filesystem_encoding + rv = sys.getfilesystemencoding() + if has_likely_buggy_unicode_filesystem and not rv or _is_ascii_encoding(rv): + if not _warned_about_filesystem_encoding: + warnings.warn( + "Detected a misconfigured UNIX filesystem: Will use" + f" UTF-8 as filesystem encoding instead of {rv!r}", + BrokenFilesystemWarning, + ) + _warned_about_filesystem_encoding = True + return "utf-8" + return rv diff --git a/venv/lib/python3.8/site-packages/werkzeug/formparser.py b/venv/lib/python3.8/site-packages/werkzeug/formparser.py new file mode 100644 index 0000000..6cb758f --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/formparser.py @@ -0,0 +1,495 @@ +import typing as t +import warnings +from functools import update_wrapper +from io import BytesIO +from itertools import chain +from typing import Union + +from . import exceptions +from ._internal import _to_str +from .datastructures import FileStorage +from .datastructures import Headers +from .datastructures import MultiDict +from .http import parse_options_header +from .sansio.multipart import Data +from .sansio.multipart import Epilogue +from .sansio.multipart import Field +from .sansio.multipart import File +from .sansio.multipart import MultipartDecoder +from .sansio.multipart import NeedData +from .urls import url_decode_stream +from .wsgi import _make_chunk_iter +from .wsgi import get_content_length +from .wsgi import get_input_stream + +# there are some platforms where SpooledTemporaryFile is not available. +# In that case we need to provide a fallback. 
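# Usage sketch for the filesystem helper above: returns the configured encoding,
# falling back to "utf-8" (with a BrokenFilesystemWarning) on POSIX hosts that
# report an ASCII locale.
from werkzeug.filesystem import get_filesystem_encoding

print(get_filesystem_encoding())  # e.g. "utf-8"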
+try: + from tempfile import SpooledTemporaryFile +except ImportError: + from tempfile import TemporaryFile + + SpooledTemporaryFile = None # type: ignore + +if t.TYPE_CHECKING: + import typing as te + from _typeshed.wsgi import WSGIEnvironment + + t_parse_result = t.Tuple[t.IO[bytes], MultiDict, MultiDict] + + class TStreamFactory(te.Protocol): + def __call__( + self, + total_content_length: t.Optional[int], + content_type: t.Optional[str], + filename: t.Optional[str], + content_length: t.Optional[int] = None, + ) -> t.IO[bytes]: + ... + + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + + +def _exhaust(stream: t.IO[bytes]) -> None: + bts = stream.read(64 * 1024) + while bts: + bts = stream.read(64 * 1024) + + +def default_stream_factory( + total_content_length: t.Optional[int], + content_type: t.Optional[str], + filename: t.Optional[str], + content_length: t.Optional[int] = None, +) -> t.IO[bytes]: + max_size = 1024 * 500 + + if SpooledTemporaryFile is not None: + return t.cast(t.IO[bytes], SpooledTemporaryFile(max_size=max_size, mode="rb+")) + elif total_content_length is None or total_content_length > max_size: + return t.cast(t.IO[bytes], TemporaryFile("rb+")) + + return BytesIO() + + +def parse_form_data( + environ: "WSGIEnvironment", + stream_factory: t.Optional["TStreamFactory"] = None, + charset: str = "utf-8", + errors: str = "replace", + max_form_memory_size: t.Optional[int] = None, + max_content_length: t.Optional[int] = None, + cls: t.Optional[t.Type[MultiDict]] = None, + silent: bool = True, +) -> "t_parse_result": + """Parse the form data in the environ and return it as tuple in the form + ``(stream, form, files)``. You should only call this method if the + transport method is `POST`, `PUT`, or `PATCH`. + + If the mimetype of the data transmitted is `multipart/form-data` the + files multidict will be filled with `FileStorage` objects. If the + mimetype is unknown the input stream is wrapped and returned as first + argument, else the stream is empty. + + This is a shortcut for the common usage of :class:`FormDataParser`. + + Have a look at :doc:`/request_data` for more details. + + .. versionadded:: 0.5 + The `max_form_memory_size`, `max_content_length` and + `cls` parameters were added. + + .. versionadded:: 0.5.1 + The optional `silent` flag was added. + + :param environ: the WSGI environment to be used for parsing. + :param stream_factory: An optional callable that returns a new read and + writeable file descriptor. This callable works + the same as :meth:`Response._get_file_stream`. + :param charset: The character set for URL and url encoded form data. + :param errors: The encoding error behavior. + :param max_form_memory_size: the maximum number of bytes to be accepted for + in-memory stored form data. If the data + exceeds the value specified an + :exc:`~exceptions.RequestEntityTooLarge` + exception is raised. + :param max_content_length: If this is provided and the transmitted data + is longer than this value an + :exc:`~exceptions.RequestEntityTooLarge` + exception is raised. + :param cls: an optional dict class to use. If this is not specified + or `None` the default :class:`MultiDict` is used. + :param silent: If set to False parsing errors will not be caught. + :return: A tuple in the form ``(stream, form, files)``. 
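# Usage sketch for parse_form_data() above. werkzeug.test.EnvironBuilder (shipped
# with Werkzeug, not shown in this diff) builds the POST environ; the field
# name and value are illustrative.
from werkzeug.formparser import parse_form_data
from werkzeug.test import EnvironBuilder

builder = EnvironBuilder(method="POST", data={"name": "example"})
stream, form, files = parse_form_data(builder.get_environ())
print(form["name"])  # "example"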
+ """ + return FormDataParser( + stream_factory, + charset, + errors, + max_form_memory_size, + max_content_length, + cls, + silent, + ).parse_from_environ(environ) + + +def exhaust_stream(f: F) -> F: + """Helper decorator for methods that exhausts the stream on return.""" + + def wrapper(self, stream, *args, **kwargs): # type: ignore + try: + return f(self, stream, *args, **kwargs) + finally: + exhaust = getattr(stream, "exhaust", None) + + if exhaust is not None: + exhaust() + else: + while True: + chunk = stream.read(1024 * 64) + + if not chunk: + break + + return update_wrapper(t.cast(F, wrapper), f) + + +class FormDataParser: + """This class implements parsing of form data for Werkzeug. By itself + it can parse multipart and url encoded form data. It can be subclassed + and extended but for most mimetypes it is a better idea to use the + untouched stream and expose it as separate attributes on a request + object. + + .. versionadded:: 0.8 + + :param stream_factory: An optional callable that returns a new read and + writeable file descriptor. This callable works + the same as :meth:`Response._get_file_stream`. + :param charset: The character set for URL and url encoded form data. + :param errors: The encoding error behavior. + :param max_form_memory_size: the maximum number of bytes to be accepted for + in-memory stored form data. If the data + exceeds the value specified an + :exc:`~exceptions.RequestEntityTooLarge` + exception is raised. + :param max_content_length: If this is provided and the transmitted data + is longer than this value an + :exc:`~exceptions.RequestEntityTooLarge` + exception is raised. + :param cls: an optional dict class to use. If this is not specified + or `None` the default :class:`MultiDict` is used. + :param silent: If set to False parsing errors will not be caught. + """ + + def __init__( + self, + stream_factory: t.Optional["TStreamFactory"] = None, + charset: str = "utf-8", + errors: str = "replace", + max_form_memory_size: t.Optional[int] = None, + max_content_length: t.Optional[int] = None, + cls: t.Optional[t.Type[MultiDict]] = None, + silent: bool = True, + ) -> None: + if stream_factory is None: + stream_factory = default_stream_factory + + self.stream_factory = stream_factory + self.charset = charset + self.errors = errors + self.max_form_memory_size = max_form_memory_size + self.max_content_length = max_content_length + + if cls is None: + cls = MultiDict + + self.cls = cls + self.silent = silent + + def get_parse_func( + self, mimetype: str, options: t.Dict[str, str] + ) -> t.Optional[ + t.Callable[ + ["FormDataParser", t.IO[bytes], str, t.Optional[int], t.Dict[str, str]], + "t_parse_result", + ] + ]: + return self.parse_functions.get(mimetype) + + def parse_from_environ(self, environ: "WSGIEnvironment") -> "t_parse_result": + """Parses the information from the environment as form data. + + :param environ: the WSGI environment to be used for parsing. + :return: A tuple in the form ``(stream, form, files)``. + """ + content_type = environ.get("CONTENT_TYPE", "") + content_length = get_content_length(environ) + mimetype, options = parse_options_header(content_type) + return self.parse(get_input_stream(environ), mimetype, content_length, options) + + def parse( + self, + stream: t.IO[bytes], + mimetype: str, + content_length: t.Optional[int], + options: t.Optional[t.Dict[str, str]] = None, + ) -> "t_parse_result": + """Parses the information from the given stream, mimetype, + content length and mimetype parameters. 
+ + :param stream: an input stream + :param mimetype: the mimetype of the data + :param content_length: the content length of the incoming data + :param options: optional mimetype parameters (used for + the multipart boundary for instance) + :return: A tuple in the form ``(stream, form, files)``. + """ + if ( + self.max_content_length is not None + and content_length is not None + and content_length > self.max_content_length + ): + # if the input stream is not exhausted, firefox reports Connection Reset + _exhaust(stream) + raise exceptions.RequestEntityTooLarge() + + if options is None: + options = {} + + parse_func = self.get_parse_func(mimetype, options) + + if parse_func is not None: + try: + return parse_func(self, stream, mimetype, content_length, options) + except ValueError: + if not self.silent: + raise + + return stream, self.cls(), self.cls() + + @exhaust_stream + def _parse_multipart( + self, + stream: t.IO[bytes], + mimetype: str, + content_length: t.Optional[int], + options: t.Dict[str, str], + ) -> "t_parse_result": + parser = MultiPartParser( + self.stream_factory, + self.charset, + self.errors, + max_form_memory_size=self.max_form_memory_size, + cls=self.cls, + ) + boundary = options.get("boundary", "").encode("ascii") + + if not boundary: + raise ValueError("Missing boundary") + + form, files = parser.parse(stream, boundary, content_length) + return stream, form, files + + @exhaust_stream + def _parse_urlencoded( + self, + stream: t.IO[bytes], + mimetype: str, + content_length: t.Optional[int], + options: t.Dict[str, str], + ) -> "t_parse_result": + if ( + self.max_form_memory_size is not None + and content_length is not None + and content_length > self.max_form_memory_size + ): + # if the input stream is not exhausted, firefox reports Connection Reset + _exhaust(stream) + raise exceptions.RequestEntityTooLarge() + + form = url_decode_stream(stream, self.charset, errors=self.errors, cls=self.cls) + return stream, form, self.cls() + + #: mapping of mimetypes to parsing functions + parse_functions: t.Dict[ + str, + t.Callable[ + ["FormDataParser", t.IO[bytes], str, t.Optional[int], t.Dict[str, str]], + "t_parse_result", + ], + ] = { + "multipart/form-data": _parse_multipart, + "application/x-www-form-urlencoded": _parse_urlencoded, + "application/x-url-encoded": _parse_urlencoded, + } + + +def _line_parse(line: str) -> t.Tuple[str, bool]: + """Removes line ending characters and returns a tuple (`stripped_line`, + `is_terminated`). + """ + if line[-2:] == "\r\n": + return line[:-2], True + + elif line[-1:] in {"\r", "\n"}: + return line[:-1], True + + return line, False + + +def parse_multipart_headers(iterable: t.Iterable[bytes]) -> Headers: + """Parses multipart headers from an iterable that yields lines (including + the trailing newline symbol). The iterable has to be newline terminated. + The iterable will stop at the line where the headers ended so it can be + further consumed. 
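# Usage sketch for the url-encoded branch above: the parsed form is a MultiDict,
# so repeated keys are preserved (the body bytes are illustrative).
from io import BytesIO
from werkzeug.formparser import FormDataParser

body = b"name=example&tag=a&tag=b"
stream, form, files = FormDataParser().parse(
    BytesIO(body), "application/x-www-form-urlencoded", len(body)
)
print(form.getlist("tag"))  # ["a", "b"]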
+ :param iterable: iterable of strings that are newline terminated + """ + warnings.warn( + "'parse_multipart_headers' is deprecated and will be removed in" + " Werkzeug 2.1.", + DeprecationWarning, + stacklevel=2, + ) + result: t.List[t.Tuple[str, str]] = [] + + for b_line in iterable: + line = _to_str(b_line) + line, line_terminated = _line_parse(line) + + if not line_terminated: + raise ValueError("unexpected end of line in multipart header") + + if not line: + break + elif line[0] in " \t" and result: + key, value = result[-1] + result[-1] = (key, f"{value}\n {line[1:]}") + else: + parts = line.split(":", 1) + + if len(parts) == 2: + result.append((parts[0].strip(), parts[1].strip())) + + # we link the list to the headers, no need to create a copy, the + # list was not shared anyways. + return Headers(result) + + +class MultiPartParser: + def __init__( + self, + stream_factory: t.Optional["TStreamFactory"] = None, + charset: str = "utf-8", + errors: str = "replace", + max_form_memory_size: t.Optional[int] = None, + cls: t.Optional[t.Type[MultiDict]] = None, + buffer_size: int = 64 * 1024, + ) -> None: + self.charset = charset + self.errors = errors + self.max_form_memory_size = max_form_memory_size + + if stream_factory is None: + stream_factory = default_stream_factory + + self.stream_factory = stream_factory + + if cls is None: + cls = MultiDict + + self.cls = cls + + self.buffer_size = buffer_size + + def fail(self, message: str) -> "te.NoReturn": + raise ValueError(message) + + def get_part_charset(self, headers: Headers) -> str: + # Figure out input charset for current part + content_type = headers.get("content-type") + + if content_type: + mimetype, ct_params = parse_options_header(content_type) + return ct_params.get("charset", self.charset) + + return self.charset + + def start_file_streaming( + self, event: File, total_content_length: t.Optional[int] + ) -> t.IO[bytes]: + content_type = event.headers.get("content-type") + + try: + content_length = int(event.headers["content-length"]) + except (KeyError, ValueError): + content_length = 0 + + container = self.stream_factory( + total_content_length=total_content_length, + filename=event.filename, + content_type=content_type, + content_length=content_length, + ) + return container + + def parse( + self, stream: t.IO[bytes], boundary: bytes, content_length: t.Optional[int] + ) -> t.Tuple[MultiDict, MultiDict]: + container: t.Union[t.IO[bytes], t.List[bytes]] + _write: t.Callable[[bytes], t.Any] + + iterator = chain( + _make_chunk_iter( + stream, + limit=content_length, + buffer_size=self.buffer_size, + ), + [None], + ) + + parser = MultipartDecoder(boundary, self.max_form_memory_size) + + fields = [] + files = [] + + current_part: Union[Field, File] + for data in iterator: + parser.receive_data(data) + event = parser.next_event() + while not isinstance(event, (Epilogue, NeedData)): + if isinstance(event, Field): + current_part = event + container = [] + _write = container.append + elif isinstance(event, File): + current_part = event + container = self.start_file_streaming(event, content_length) + _write = container.write + elif isinstance(event, Data): + _write(event.data) + if not event.more_data: + if isinstance(current_part, Field): + value = b"".join(container).decode( + self.get_part_charset(current_part.headers), self.errors + ) + fields.append((current_part.name, value)) + else: + container = t.cast(t.IO[bytes], container) + container.seek(0) + files.append( + ( + current_part.name, + FileStorage( + container, + 
current_part.filename, + current_part.name, + headers=current_part.headers, + ), + ) + ) + + event = parser.next_event() + + return self.cls(fields), self.cls(files) diff --git a/venv/lib/python3.8/site-packages/werkzeug/http.py b/venv/lib/python3.8/site-packages/werkzeug/http.py new file mode 100644 index 0000000..ca48fe2 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/http.py @@ -0,0 +1,1388 @@ +import base64 +import email.utils +import re +import typing +import typing as t +import warnings +from datetime import date +from datetime import datetime +from datetime import time +from datetime import timedelta +from datetime import timezone +from enum import Enum +from hashlib import sha1 +from time import mktime +from time import struct_time +from urllib.parse import unquote_to_bytes as _unquote +from urllib.request import parse_http_list as _parse_list_header + +from ._internal import _cookie_parse_impl +from ._internal import _cookie_quote +from ._internal import _make_cookie_domain +from ._internal import _to_bytes +from ._internal import _to_str +from ._internal import _wsgi_decoding_dance +from werkzeug._internal import _dt_as_utc + +if t.TYPE_CHECKING: + import typing_extensions as te + from _typeshed.wsgi import WSGIEnvironment + +# for explanation of "media-range", etc. see Sections 5.3.{1,2} of RFC 7231 +_accept_re = re.compile( + r""" + ( # media-range capturing-parenthesis + [^\s;,]+ # type/subtype + (?:[ \t]*;[ \t]* # ";" + (?: # parameter non-capturing-parenthesis + [^\s;,q][^\s;,]* # token that doesn't start with "q" + | # or + q[^\s;,=][^\s;,]* # token that is more than just "q" + ) + )* # zero or more parameters + ) # end of media-range + (?:[ \t]*;[ \t]*q= # weight is a "q" parameter + (\d*(?:\.\d+)?) # qvalue capturing-parentheses + [^,]* # "extension" accept params: who cares? + )? # accept params are optional + """, + re.VERBOSE, +) +_token_chars = frozenset( + "!#$%&'*+-.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ^_`abcdefghijklmnopqrstuvwxyz|~" +) +_etag_re = re.compile(r'([Ww]/)?(?:"(.*?)"|(.*?))(?:\s*,\s*|$)') +_option_header_piece_re = re.compile( + r""" + ;\s*,?\s* # newlines were replaced with commas + (?P + "[^"\\]*(?:\\.[^"\\]*)*" # quoted string + | + [^\s;,=*]+ # token + ) + (?:\*(?P\d+))? # *1, optional continuation index + \s* + (?: # optionally followed by =value + (?: # equals sign, possibly with encoding + \*\s*=\s* # * indicates extended notation + (?: # optional encoding + (?P[^\s]+?) + '(?P[^\s]*?)' + )? + | + =\s* # basic notation + ) + (?P + "[^"\\]*(?:\\.[^"\\]*)*" # quoted string + | + [^;,]+ # token + )? + )? 
+ \s* + """, + flags=re.VERBOSE, +) +_option_header_start_mime_type = re.compile(r",\s*([^;,\s]+)([;,]\s*.+)?") +_entity_headers = frozenset( + [ + "allow", + "content-encoding", + "content-language", + "content-length", + "content-location", + "content-md5", + "content-range", + "content-type", + "expires", + "last-modified", + ] +) +_hop_by_hop_headers = frozenset( + [ + "connection", + "keep-alive", + "proxy-authenticate", + "proxy-authorization", + "te", + "trailer", + "transfer-encoding", + "upgrade", + ] +) +HTTP_STATUS_CODES = { + 100: "Continue", + 101: "Switching Protocols", + 102: "Processing", + 103: "Early Hints", # see RFC 8297 + 200: "OK", + 201: "Created", + 202: "Accepted", + 203: "Non Authoritative Information", + 204: "No Content", + 205: "Reset Content", + 206: "Partial Content", + 207: "Multi Status", + 208: "Already Reported", # see RFC 5842 + 226: "IM Used", # see RFC 3229 + 300: "Multiple Choices", + 301: "Moved Permanently", + 302: "Found", + 303: "See Other", + 304: "Not Modified", + 305: "Use Proxy", + 306: "Switch Proxy", # unused + 307: "Temporary Redirect", + 308: "Permanent Redirect", + 400: "Bad Request", + 401: "Unauthorized", + 402: "Payment Required", # unused + 403: "Forbidden", + 404: "Not Found", + 405: "Method Not Allowed", + 406: "Not Acceptable", + 407: "Proxy Authentication Required", + 408: "Request Timeout", + 409: "Conflict", + 410: "Gone", + 411: "Length Required", + 412: "Precondition Failed", + 413: "Request Entity Too Large", + 414: "Request URI Too Long", + 415: "Unsupported Media Type", + 416: "Requested Range Not Satisfiable", + 417: "Expectation Failed", + 418: "I'm a teapot", # see RFC 2324 + 421: "Misdirected Request", # see RFC 7540 + 422: "Unprocessable Entity", + 423: "Locked", + 424: "Failed Dependency", + 425: "Too Early", # see RFC 8470 + 426: "Upgrade Required", + 428: "Precondition Required", # see RFC 6585 + 429: "Too Many Requests", + 431: "Request Header Fields Too Large", + 449: "Retry With", # proprietary MS extension + 451: "Unavailable For Legal Reasons", + 500: "Internal Server Error", + 501: "Not Implemented", + 502: "Bad Gateway", + 503: "Service Unavailable", + 504: "Gateway Timeout", + 505: "HTTP Version Not Supported", + 506: "Variant Also Negotiates", # see RFC 2295 + 507: "Insufficient Storage", + 508: "Loop Detected", # see RFC 5842 + 510: "Not Extended", + 511: "Network Authentication Failed", +} + + +class COEP(Enum): + """Cross Origin Embedder Policies""" + + UNSAFE_NONE = "unsafe-none" + REQUIRE_CORP = "require-corp" + + +class COOP(Enum): + """Cross Origin Opener Policies""" + + UNSAFE_NONE = "unsafe-none" + SAME_ORIGIN_ALLOW_POPUPS = "same-origin-allow-popups" + SAME_ORIGIN = "same-origin" + + +def quote_header_value( + value: t.Union[str, int], extra_chars: str = "", allow_token: bool = True +) -> str: + """Quote a header value if necessary. + + .. versionadded:: 0.5 + + :param value: the value to quote. + :param extra_chars: a list of extra characters to skip quoting. + :param allow_token: if this is enabled token values are returned + unchanged. + """ + if isinstance(value, bytes): + value = value.decode("latin1") + value = str(value) + if allow_token: + token_chars = _token_chars | set(extra_chars) + if set(value).issubset(token_chars): + return value + value = value.replace("\\", "\\\\").replace('"', '\\"') + return f'"{value}"' + + +def unquote_header_value(value: str, is_filename: bool = False) -> str: + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). 
+ This does not use the real unquoting but what browsers are actually + using for quoting. + + .. versionadded:: 0.5 + + :param value: the header value to unquote. + :param is_filename: The value represents a filename or path. + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != "\\\\": + return value.replace("\\\\", "\\").replace('\\"', '"') + return value + + +def dump_options_header( + header: t.Optional[str], options: t.Mapping[str, t.Optional[t.Union[str, int]]] +) -> str: + """The reverse function to :func:`parse_options_header`. + + :param header: the header to dump + :param options: a dict of options to append. + """ + segments = [] + if header is not None: + segments.append(header) + for key, value in options.items(): + if value is None: + segments.append(key) + else: + segments.append(f"{key}={quote_header_value(value)}") + return "; ".join(segments) + + +def dump_header( + iterable: t.Union[t.Dict[str, t.Union[str, int]], t.Iterable[str]], + allow_token: bool = True, +) -> str: + """Dump an HTTP header again. This is the reversal of + :func:`parse_list_header`, :func:`parse_set_header` and + :func:`parse_dict_header`. This also quotes strings that include an + equals sign unless you pass it as dict of key, value pairs. + + >>> dump_header({'foo': 'bar baz'}) + 'foo="bar baz"' + >>> dump_header(('foo', 'bar baz')) + 'foo, "bar baz"' + + :param iterable: the iterable or dict of values to quote. + :param allow_token: if set to `False` tokens as values are disallowed. + See :func:`quote_header_value` for more details. + """ + if isinstance(iterable, dict): + items = [] + for key, value in iterable.items(): + if value is None: + items.append(key) + else: + items.append( + f"{key}={quote_header_value(value, allow_token=allow_token)}" + ) + else: + items = [quote_header_value(x, allow_token=allow_token) for x in iterable] + return ", ".join(items) + + +def dump_csp_header(header: "ds.ContentSecurityPolicy") -> str: + """Dump a Content Security Policy header. + + These are structured into policies such as "default-src 'self'; + script-src 'self'". + + .. versionadded:: 1.0.0 + Support for Content Security Policy headers was added. + + """ + return "; ".join(f"{key} {value}" for key, value in header.items()) + + +def parse_list_header(value: str) -> t.List[str]: + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. 
+ + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. + + :param value: a string with a list header. + :return: :class:`list` + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +def parse_dict_header(value: str, cls: t.Type[dict] = dict) -> t.Dict[str, str]: + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict (or any other mapping object created from + the type with a dict like interface provided by the `cls` argument): + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + .. versionchanged:: 0.9 + Added support for `cls` argument. + + :param value: a string with a dict header. + :param cls: callable to use for storage of parsed results. + :return: an instance of `cls` + """ + result = cls() + if isinstance(value, bytes): + value = value.decode("latin1") + for item in _parse_list_header(value): + if "=" not in item: + result[item] = None + continue + name, value = item.split("=", 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +@typing.overload +def parse_options_header( + value: t.Optional[str], multiple: "te.Literal[False]" = False +) -> t.Tuple[str, t.Dict[str, str]]: + ... + + +@typing.overload +def parse_options_header( + value: t.Optional[str], multiple: "te.Literal[True]" +) -> t.Tuple[t.Any, ...]: + ... + + +def parse_options_header( + value: t.Optional[str], multiple: bool = False +) -> t.Union[t.Tuple[str, t.Dict[str, str]], t.Tuple[t.Any, ...]]: + """Parse a ``Content-Type`` like header into a tuple with the content + type and the options: + + >>> parse_options_header('text/html; charset=utf8') + ('text/html', {'charset': 'utf8'}) + + This should not be used to parse ``Cache-Control`` like headers that use + a slightly different format. For these headers use the + :func:`parse_dict_header` function. + + .. versionchanged:: 0.15 + :rfc:`2231` parameter continuations are handled. + + .. versionadded:: 0.5 + + :param value: the header to parse. + :param multiple: Whether try to parse and return multiple MIME types + :return: (mimetype, options) or (mimetype, options, mimetype, options, …) + if multiple=True + """ + if not value: + return "", {} + + result: t.List[t.Any] = [] + + value = "," + value.replace("\n", ",") + while value: + match = _option_header_start_mime_type.match(value) + if not match: + break + result.append(match.group(1)) # mimetype + options: t.Dict[str, str] = {} + # Parse options + rest = match.group(2) + encoding: t.Optional[str] + continued_encoding: t.Optional[str] = None + while rest: + optmatch = _option_header_piece_re.match(rest) + if not optmatch: + break + option, count, encoding, language, option_value = optmatch.groups() + # Continuations don't have to supply the encoding after the + # first line. If we're in a continuation, track the current + # encoding to use for subsequent lines. 
Reset it when the + # continuation ends. + if not count: + continued_encoding = None + else: + if not encoding: + encoding = continued_encoding + continued_encoding = encoding + option = unquote_header_value(option) + if option_value is not None: + option_value = unquote_header_value(option_value, option == "filename") + if encoding is not None: + option_value = _unquote(option_value).decode(encoding) + if count: + # Continuations append to the existing value. For + # simplicity, this ignores the possibility of + # out-of-order indices, which shouldn't happen anyway. + options[option] = options.get(option, "") + option_value + else: + options[option] = option_value + rest = rest[optmatch.end() :] + result.append(options) + if multiple is False: + return tuple(result) + value = rest + + return tuple(result) if result else ("", {}) + + +_TAnyAccept = t.TypeVar("_TAnyAccept", bound="ds.Accept") + + +@typing.overload +def parse_accept_header(value: t.Optional[str]) -> "ds.Accept": + ... + + +@typing.overload +def parse_accept_header( + value: t.Optional[str], cls: t.Type[_TAnyAccept] +) -> _TAnyAccept: + ... + + +def parse_accept_header( + value: t.Optional[str], cls: t.Optional[t.Type[_TAnyAccept]] = None +) -> _TAnyAccept: + """Parses an HTTP Accept-* header. This does not implement a complete + valid algorithm but one that supports at least value and quality + extraction. + + Returns a new :class:`Accept` object (basically a list of ``(value, quality)`` + tuples sorted by the quality with some additional accessor methods). + + The second parameter can be a subclass of :class:`Accept` that is created + with the parsed values and returned. + + :param value: the accept header string to be parsed. + :param cls: the wrapper class for the return value (can be + :class:`Accept` or a subclass thereof) + :return: an instance of `cls`. + """ + if cls is None: + cls = t.cast(t.Type[_TAnyAccept], ds.Accept) + + if not value: + return cls(None) + + result = [] + for match in _accept_re.finditer(value): + quality_match = match.group(2) + if not quality_match: + quality: float = 1 + else: + quality = max(min(float(quality_match), 1), 0) + result.append((match.group(1), quality)) + return cls(result) + + +_TAnyCC = t.TypeVar("_TAnyCC", bound="ds._CacheControl") +_t_cc_update = t.Optional[t.Callable[[_TAnyCC], None]] + + +@typing.overload +def parse_cache_control_header( + value: t.Optional[str], on_update: _t_cc_update, cls: None = None +) -> "ds.RequestCacheControl": + ... + + +@typing.overload +def parse_cache_control_header( + value: t.Optional[str], on_update: _t_cc_update, cls: t.Type[_TAnyCC] +) -> _TAnyCC: + ... + + +def parse_cache_control_header( + value: t.Optional[str], + on_update: _t_cc_update = None, + cls: t.Optional[t.Type[_TAnyCC]] = None, +) -> _TAnyCC: + """Parse a cache control header. The RFC differs between response and + request cache control, this method does not. It's your responsibility + to not use the wrong control statements. + + .. versionadded:: 0.5 + The `cls` was added. If not specified an immutable + :class:`~werkzeug.datastructures.RequestCacheControl` is returned. + + :param value: a cache control header to be parsed. + :param on_update: an optional callable that is called every time a value + on the :class:`~werkzeug.datastructures.CacheControl` + object is changed. + :param cls: the class for the returned object. By default + :class:`~werkzeug.datastructures.RequestCacheControl` is used. + :return: a `cls` object. 
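+
+    A minimal usage sketch (an illustrative addition, not part of the
+    upstream docstring), using the default
+    :class:`~werkzeug.datastructures.RequestCacheControl`::
+
+        cc = parse_cache_control_header("no-store, max-age=3600")
+        cc.max_age   # 3600 (coerced to int)
+        cc.no_store  # True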
+ """ + if cls is None: + cls = t.cast(t.Type[_TAnyCC], ds.RequestCacheControl) + + if not value: + return cls((), on_update) + + return cls(parse_dict_header(value), on_update) + + +_TAnyCSP = t.TypeVar("_TAnyCSP", bound="ds.ContentSecurityPolicy") +_t_csp_update = t.Optional[t.Callable[[_TAnyCSP], None]] + + +@typing.overload +def parse_csp_header( + value: t.Optional[str], on_update: _t_csp_update, cls: None = None +) -> "ds.ContentSecurityPolicy": + ... + + +@typing.overload +def parse_csp_header( + value: t.Optional[str], on_update: _t_csp_update, cls: t.Type[_TAnyCSP] +) -> _TAnyCSP: + ... + + +def parse_csp_header( + value: t.Optional[str], + on_update: _t_csp_update = None, + cls: t.Optional[t.Type[_TAnyCSP]] = None, +) -> _TAnyCSP: + """Parse a Content Security Policy header. + + .. versionadded:: 1.0.0 + Support for Content Security Policy headers was added. + + :param value: a csp header to be parsed. + :param on_update: an optional callable that is called every time a value + on the object is changed. + :param cls: the class for the returned object. By default + :class:`~werkzeug.datastructures.ContentSecurityPolicy` is used. + :return: a `cls` object. + """ + if cls is None: + cls = t.cast(t.Type[_TAnyCSP], ds.ContentSecurityPolicy) + + if value is None: + return cls((), on_update) + + items = [] + + for policy in value.split(";"): + policy = policy.strip() + + # Ignore badly formatted policies (no space) + if " " in policy: + directive, value = policy.strip().split(" ", 1) + items.append((directive.strip(), value.strip())) + + return cls(items, on_update) + + +def parse_set_header( + value: t.Optional[str], + on_update: t.Optional[t.Callable[["ds.HeaderSet"], None]] = None, +) -> "ds.HeaderSet": + """Parse a set-like header and return a + :class:`~werkzeug.datastructures.HeaderSet` object: + + >>> hs = parse_set_header('token, "quoted value"') + + The return value is an object that treats the items case-insensitively + and keeps the order of the items: + + >>> 'TOKEN' in hs + True + >>> hs.index('quoted value') + 1 + >>> hs + HeaderSet(['token', 'quoted value']) + + To create a header from the :class:`HeaderSet` again, use the + :func:`dump_header` function. + + :param value: a set header to be parsed. + :param on_update: an optional callable that is called every time a + value on the :class:`~werkzeug.datastructures.HeaderSet` + object is changed. + :return: a :class:`~werkzeug.datastructures.HeaderSet` + """ + if not value: + return ds.HeaderSet(None, on_update) + return ds.HeaderSet(parse_list_header(value), on_update) + + +def parse_authorization_header( + value: t.Optional[str], +) -> t.Optional["ds.Authorization"]: + """Parse an HTTP basic/digest authorization header transmitted by the web + browser. The return value is either `None` if the header was invalid or + not given, otherwise an :class:`~werkzeug.datastructures.Authorization` + object. + + :param value: the authorization header to parse. + :return: a :class:`~werkzeug.datastructures.Authorization` object or `None`. 
+ """ + if not value: + return None + value = _wsgi_decoding_dance(value) + try: + auth_type, auth_info = value.split(None, 1) + auth_type = auth_type.lower() + except ValueError: + return None + if auth_type == "basic": + try: + username, password = base64.b64decode(auth_info).split(b":", 1) + except Exception: + return None + try: + return ds.Authorization( + "basic", + { + "username": _to_str(username, "utf-8"), + "password": _to_str(password, "utf-8"), + }, + ) + except UnicodeDecodeError: + return None + elif auth_type == "digest": + auth_map = parse_dict_header(auth_info) + for key in "username", "realm", "nonce", "uri", "response": + if key not in auth_map: + return None + if "qop" in auth_map: + if not auth_map.get("nc") or not auth_map.get("cnonce"): + return None + return ds.Authorization("digest", auth_map) + return None + + +def parse_www_authenticate_header( + value: t.Optional[str], + on_update: t.Optional[t.Callable[["ds.WWWAuthenticate"], None]] = None, +) -> "ds.WWWAuthenticate": + """Parse an HTTP WWW-Authenticate header into a + :class:`~werkzeug.datastructures.WWWAuthenticate` object. + + :param value: a WWW-Authenticate header to parse. + :param on_update: an optional callable that is called every time a value + on the :class:`~werkzeug.datastructures.WWWAuthenticate` + object is changed. + :return: a :class:`~werkzeug.datastructures.WWWAuthenticate` object. + """ + if not value: + return ds.WWWAuthenticate(on_update=on_update) + try: + auth_type, auth_info = value.split(None, 1) + auth_type = auth_type.lower() + except (ValueError, AttributeError): + return ds.WWWAuthenticate(value.strip().lower(), on_update=on_update) + return ds.WWWAuthenticate(auth_type, parse_dict_header(auth_info), on_update) + + +def parse_if_range_header(value: t.Optional[str]) -> "ds.IfRange": + """Parses an if-range header which can be an etag or a date. Returns + a :class:`~werkzeug.datastructures.IfRange` object. + + .. versionchanged:: 2.0 + If the value represents a datetime, it is timezone-aware. + + .. versionadded:: 0.7 + """ + if not value: + return ds.IfRange() + date = parse_date(value) + if date is not None: + return ds.IfRange(date=date) + # drop weakness information + return ds.IfRange(unquote_etag(value)[0]) + + +def parse_range_header( + value: t.Optional[str], make_inclusive: bool = True +) -> t.Optional["ds.Range"]: + """Parses a range header into a :class:`~werkzeug.datastructures.Range` + object. If the header is missing or malformed `None` is returned. + `ranges` is a list of ``(start, stop)`` tuples where the ranges are + non-inclusive. + + .. 
versionadded:: 0.7 + """ + if not value or "=" not in value: + return None + + ranges = [] + last_end = 0 + units, rng = value.split("=", 1) + units = units.strip().lower() + + for item in rng.split(","): + item = item.strip() + if "-" not in item: + return None + if item.startswith("-"): + if last_end < 0: + return None + try: + begin = int(item) + except ValueError: + return None + end = None + last_end = -1 + elif "-" in item: + begin_str, end_str = item.split("-", 1) + begin_str = begin_str.strip() + end_str = end_str.strip() + if not begin_str.isdigit(): + return None + begin = int(begin_str) + if begin < last_end or last_end < 0: + return None + if end_str: + if not end_str.isdigit(): + return None + end = int(end_str) + 1 + if begin >= end: + return None + else: + end = None + last_end = end if end is not None else -1 + ranges.append((begin, end)) + + return ds.Range(units, ranges) + + +def parse_content_range_header( + value: t.Optional[str], + on_update: t.Optional[t.Callable[["ds.ContentRange"], None]] = None, +) -> t.Optional["ds.ContentRange"]: + """Parses a range header into a + :class:`~werkzeug.datastructures.ContentRange` object or `None` if + parsing is not possible. + + .. versionadded:: 0.7 + + :param value: a content range header to be parsed. + :param on_update: an optional callable that is called every time a value + on the :class:`~werkzeug.datastructures.ContentRange` + object is changed. + """ + if value is None: + return None + try: + units, rangedef = (value or "").strip().split(None, 1) + except ValueError: + return None + + if "/" not in rangedef: + return None + rng, length_str = rangedef.split("/", 1) + if length_str == "*": + length = None + elif length_str.isdigit(): + length = int(length_str) + else: + return None + + if rng == "*": + return ds.ContentRange(units, None, None, length, on_update=on_update) + elif "-" not in rng: + return None + + start_str, stop_str = rng.split("-", 1) + try: + start = int(start_str) + stop = int(stop_str) + 1 + except ValueError: + return None + + if is_byte_range_valid(start, stop, length): + return ds.ContentRange(units, start, stop, length, on_update=on_update) + + return None + + +def quote_etag(etag: str, weak: bool = False) -> str: + """Quote an etag. + + :param etag: the etag to quote. + :param weak: set to `True` to tag it "weak". + """ + if '"' in etag: + raise ValueError("invalid etag") + etag = f'"{etag}"' + if weak: + etag = f"W/{etag}" + return etag + + +def unquote_etag( + etag: t.Optional[str], +) -> t.Union[t.Tuple[str, bool], t.Tuple[None, None]]: + """Unquote a single etag: + + >>> unquote_etag('W/"bar"') + ('bar', True) + >>> unquote_etag('"bar"') + ('bar', False) + + :param etag: the etag identifier to unquote. + :return: a ``(etag, weak)`` tuple. + """ + if not etag: + return None, None + etag = etag.strip() + weak = False + if etag.startswith(("W/", "w/")): + weak = True + etag = etag[2:] + if etag[:1] == etag[-1:] == '"': + etag = etag[1:-1] + return etag, weak + + +def parse_etags(value: t.Optional[str]) -> "ds.ETags": + """Parse an etag header. + + :param value: the tag header to parse + :return: an :class:`~werkzeug.datastructures.ETags` object. 
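+
+    A minimal usage sketch (an illustrative addition, not part of the
+    upstream docstring)::
+
+        etags = parse_etags('"abc", W/"def"')
+        "abc" in etags              # True, strong comparison
+        etags.contains_weak("def")  # True, weak comparison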
+ """ + if not value: + return ds.ETags() + strong = [] + weak = [] + end = len(value) + pos = 0 + while pos < end: + match = _etag_re.match(value, pos) + if match is None: + break + is_weak, quoted, raw = match.groups() + if raw == "*": + return ds.ETags(star_tag=True) + elif quoted: + raw = quoted + if is_weak: + weak.append(raw) + else: + strong.append(raw) + pos = match.end() + return ds.ETags(strong, weak) + + +def generate_etag(data: bytes) -> str: + """Generate an etag for some data. + + .. versionchanged:: 2.0 + Use SHA-1. MD5 may not be available in some environments. + """ + return sha1(data).hexdigest() + + +def parse_date(value: t.Optional[str]) -> t.Optional[datetime]: + """Parse an :rfc:`2822` date into a timezone-aware + :class:`datetime.datetime` object, or ``None`` if parsing fails. + + This is a wrapper for :func:`email.utils.parsedate_to_datetime`. It + returns ``None`` if parsing fails instead of raising an exception, + and always returns a timezone-aware datetime object. If the string + doesn't have timezone information, it is assumed to be UTC. + + :param value: A string with a supported date format. + + .. versionchanged:: 2.0 + Return a timezone-aware datetime object. Use + ``email.utils.parsedate_to_datetime``. + """ + if value is None: + return None + + try: + dt = email.utils.parsedate_to_datetime(value) + except (TypeError, ValueError): + return None + + if dt.tzinfo is None: + return dt.replace(tzinfo=timezone.utc) + + return dt + + +def cookie_date( + expires: t.Optional[t.Union[datetime, date, int, float, struct_time]] = None +) -> str: + """Format a datetime object or timestamp into an :rfc:`2822` date + string for ``Set-Cookie expires``. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Use :func:`http_date` instead. + """ + warnings.warn( + "'cookie_date' is deprecated and will be removed in Werkzeug" + " 2.1. Use 'http_date' instead.", + DeprecationWarning, + stacklevel=2, + ) + return http_date(expires) + + +def http_date( + timestamp: t.Optional[t.Union[datetime, date, int, float, struct_time]] = None +) -> str: + """Format a datetime object or timestamp into an :rfc:`2822` date + string. + + This is a wrapper for :func:`email.utils.format_datetime`. It + assumes naive datetime objects are in UTC instead of raising an + exception. + + :param timestamp: The datetime or timestamp to format. Defaults to + the current time. + + .. versionchanged:: 2.0 + Use ``email.utils.format_datetime``. Accept ``date`` objects. + """ + if isinstance(timestamp, date): + if not isinstance(timestamp, datetime): + # Assume plain date is midnight UTC. + timestamp = datetime.combine(timestamp, time(), tzinfo=timezone.utc) + else: + # Ensure datetime is timezone-aware. + timestamp = _dt_as_utc(timestamp) + + return email.utils.format_datetime(timestamp, usegmt=True) + + if isinstance(timestamp, struct_time): + timestamp = mktime(timestamp) + + return email.utils.formatdate(timestamp, usegmt=True) + + +def parse_age(value: t.Optional[str] = None) -> t.Optional[timedelta]: + """Parses a base-10 integer count of seconds into a timedelta. + + If parsing fails, the return value is `None`. + + :param value: a string consisting of an integer represented in base-10 + :return: a :class:`datetime.timedelta` object or `None`. 
+ """ + if not value: + return None + try: + seconds = int(value) + except ValueError: + return None + if seconds < 0: + return None + try: + return timedelta(seconds=seconds) + except OverflowError: + return None + + +def dump_age(age: t.Optional[t.Union[timedelta, int]] = None) -> t.Optional[str]: + """Formats the duration as a base-10 integer. + + :param age: should be an integer number of seconds, + a :class:`datetime.timedelta` object, or, + if the age is unknown, `None` (default). + """ + if age is None: + return None + if isinstance(age, timedelta): + age = int(age.total_seconds()) + else: + age = int(age) + + if age < 0: + raise ValueError("age cannot be negative") + + return str(age) + + +def is_resource_modified( + environ: "WSGIEnvironment", + etag: t.Optional[str] = None, + data: t.Optional[bytes] = None, + last_modified: t.Optional[t.Union[datetime, str]] = None, + ignore_if_range: bool = True, +) -> bool: + """Convenience method for conditional requests. + + :param environ: the WSGI environment of the request to be checked. + :param etag: the etag for the response for comparison. + :param data: or alternatively the data of the response to automatically + generate an etag using :func:`generate_etag`. + :param last_modified: an optional date of the last modification. + :param ignore_if_range: If `False`, `If-Range` header will be taken into + account. + :return: `True` if the resource was modified, otherwise `False`. + + .. versionchanged:: 2.0 + SHA-1 is used to generate an etag value for the data. MD5 may + not be available in some environments. + + .. versionchanged:: 1.0.0 + The check is run for methods other than ``GET`` and ``HEAD``. + """ + if etag is None and data is not None: + etag = generate_etag(data) + elif data is not None: + raise TypeError("both data and etag given") + + unmodified = False + if isinstance(last_modified, str): + last_modified = parse_date(last_modified) + + # HTTP doesn't use microsecond, remove it to avoid false positive + # comparisons. Mark naive datetimes as UTC. + if last_modified is not None: + last_modified = _dt_as_utc(last_modified.replace(microsecond=0)) + + if_range = None + if not ignore_if_range and "HTTP_RANGE" in environ: + # https://tools.ietf.org/html/rfc7233#section-3.2 + # A server MUST ignore an If-Range header field received in a request + # that does not contain a Range header field. 
+ if_range = parse_if_range_header(environ.get("HTTP_IF_RANGE")) + + if if_range is not None and if_range.date is not None: + modified_since: t.Optional[datetime] = if_range.date + else: + modified_since = parse_date(environ.get("HTTP_IF_MODIFIED_SINCE")) + + if modified_since and last_modified and last_modified <= modified_since: + unmodified = True + + if etag: + etag, _ = unquote_etag(etag) + etag = t.cast(str, etag) + + if if_range is not None and if_range.etag is not None: + unmodified = parse_etags(if_range.etag).contains(etag) + else: + if_none_match = parse_etags(environ.get("HTTP_IF_NONE_MATCH")) + if if_none_match: + # https://tools.ietf.org/html/rfc7232#section-3.2 + # "A recipient MUST use the weak comparison function when comparing + # entity-tags for If-None-Match" + unmodified = if_none_match.contains_weak(etag) + + # https://tools.ietf.org/html/rfc7232#section-3.1 + # "Origin server MUST use the strong comparison function when + # comparing entity-tags for If-Match" + if_match = parse_etags(environ.get("HTTP_IF_MATCH")) + if if_match: + unmodified = not if_match.is_strong(etag) + + return not unmodified + + +def remove_entity_headers( + headers: t.Union["ds.Headers", t.List[t.Tuple[str, str]]], + allowed: t.Iterable[str] = ("expires", "content-location"), +) -> None: + """Remove all entity headers from a list or :class:`Headers` object. This + operation works in-place. `Expires` and `Content-Location` headers are + by default not removed. The reason for this is :rfc:`2616` section + 10.3.5 which specifies some entity headers that should be sent. + + .. versionchanged:: 0.5 + added `allowed` parameter. + + :param headers: a list or :class:`Headers` object. + :param allowed: a list of headers that should still be allowed even though + they are entity headers. + """ + allowed = {x.lower() for x in allowed} + headers[:] = [ + (key, value) + for key, value in headers + if not is_entity_header(key) or key.lower() in allowed + ] + + +def remove_hop_by_hop_headers( + headers: t.Union["ds.Headers", t.List[t.Tuple[str, str]]] +) -> None: + """Remove all HTTP/1.1 "Hop-by-Hop" headers from a list or + :class:`Headers` object. This operation works in-place. + + .. versionadded:: 0.5 + + :param headers: a list or :class:`Headers` object. + """ + headers[:] = [ + (key, value) for key, value in headers if not is_hop_by_hop_header(key) + ] + + +def is_entity_header(header: str) -> bool: + """Check if a header is an entity header. + + .. versionadded:: 0.5 + + :param header: the header to test. + :return: `True` if it's an entity header, `False` otherwise. + """ + return header.lower() in _entity_headers + + +def is_hop_by_hop_header(header: str) -> bool: + """Check if a header is an HTTP/1.1 "Hop-by-Hop" header. + + .. versionadded:: 0.5 + + :param header: the header to test. + :return: `True` if it's an HTTP/1.1 "Hop-by-Hop" header, `False` otherwise. + """ + return header.lower() in _hop_by_hop_headers + + +def parse_cookie( + header: t.Union["WSGIEnvironment", str, bytes, None], + charset: str = "utf-8", + errors: str = "replace", + cls: t.Optional[t.Type["ds.MultiDict"]] = None, +) -> "ds.MultiDict[str, str]": + """Parse a cookie from a string or WSGI environ. + + The same key can be provided multiple times, the values are stored + in-order. The default :class:`MultiDict` will have the first value + first, and all values can be retrieved with + :meth:`MultiDict.getlist`. + + :param header: The cookie header as a string, or a WSGI environ dict + with a ``HTTP_COOKIE`` key. 
+ :param charset: The charset for the cookie values. + :param errors: The error behavior for the charset decoding. + :param cls: A dict-like class to store the parsed cookies in. + Defaults to :class:`MultiDict`. + + .. versionchanged:: 1.0.0 + Returns a :class:`MultiDict` instead of a + ``TypeConversionDict``. + + .. versionchanged:: 0.5 + Returns a :class:`TypeConversionDict` instead of a regular dict. + The ``cls`` parameter was added. + """ + if isinstance(header, dict): + header = header.get("HTTP_COOKIE", "") + elif header is None: + header = "" + + # PEP 3333 sends headers through the environ as latin1 decoded + # strings. Encode strings back to bytes for parsing. + if isinstance(header, str): + header = header.encode("latin1", "replace") + + if cls is None: + cls = ds.MultiDict + + def _parse_pairs() -> t.Iterator[t.Tuple[str, str]]: + for key, val in _cookie_parse_impl(header): # type: ignore + key_str = _to_str(key, charset, errors, allow_none_charset=True) + + if not key_str: + continue + + val_str = _to_str(val, charset, errors, allow_none_charset=True) + yield key_str, val_str + + return cls(_parse_pairs()) + + +def dump_cookie( + key: str, + value: t.Union[bytes, str] = "", + max_age: t.Optional[t.Union[timedelta, int]] = None, + expires: t.Optional[t.Union[str, datetime, int, float]] = None, + path: t.Optional[str] = "/", + domain: t.Optional[str] = None, + secure: bool = False, + httponly: bool = False, + charset: str = "utf-8", + sync_expires: bool = True, + max_size: int = 4093, + samesite: t.Optional[str] = None, +) -> str: + """Create a Set-Cookie header without the ``Set-Cookie`` prefix. + + The return value is usually restricted to ascii as the vast majority + of values are properly escaped, but that is no guarantee. It's + tunneled through latin1 as required by :pep:`3333`. + + The return value is not ASCII safe if the key contains unicode + characters. This is technically against the specification but + happens in the wild. It's strongly recommended to not use + non-ASCII values for the keys. + + :param max_age: should be a number of seconds, or `None` (default) if + the cookie should last only as long as the client's + browser session. Additionally `timedelta` objects + are accepted, too. + :param expires: should be a `datetime` object or unix timestamp. + :param path: limits the cookie to a given path, per default it will + span the whole domain. + :param domain: Use this if you want to set a cross-domain cookie. For + example, ``domain=".example.com"`` will set a cookie + that is readable by the domain ``www.example.com``, + ``foo.example.com`` etc. Otherwise, a cookie will only + be readable by the domain that set it. + :param secure: The cookie will only be available via HTTPS + :param httponly: disallow JavaScript to access the cookie. This is an + extension to the cookie standard and probably not + supported by all browsers. + :param charset: the encoding for string values. + :param sync_expires: automatically set expires if max_age is defined + but expires not. + :param max_size: Warn if the final header value exceeds this size. The + default, 4093, should be safely `supported by most browsers + `_. Set to 0 to disable this check. + :param samesite: Limits the scope of the cookie such that it will + only be attached to requests if those requests are same-site. + + .. _`cookie`: http://browsercookielimits.squawky.net/ + + .. versionchanged:: 1.0.0 + The string ``'None'`` is accepted for ``samesite``. 
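+
+    A minimal usage sketch (an illustrative addition, not part of the
+    upstream docstring; the cookie name and value are made up)::
+
+        dump_cookie("session", "abc123", max_age=3600, httponly=True)
+        # 'session=abc123; Expires=...; Max-Age=3600; HttpOnly; Path=/'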
+ """ + key = _to_bytes(key, charset) + value = _to_bytes(value, charset) + + if path is not None: + from .urls import iri_to_uri + + path = iri_to_uri(path, charset) + + domain = _make_cookie_domain(domain) + + if isinstance(max_age, timedelta): + max_age = int(max_age.total_seconds()) + + if expires is not None: + if not isinstance(expires, str): + expires = http_date(expires) + elif max_age is not None and sync_expires: + expires = http_date(datetime.now(tz=timezone.utc).timestamp() + max_age) + + if samesite is not None: + samesite = samesite.title() + + if samesite not in {"Strict", "Lax", "None"}: + raise ValueError("SameSite must be 'Strict', 'Lax', or 'None'.") + + buf = [key + b"=" + _cookie_quote(value)] + + # XXX: In theory all of these parameters that are not marked with `None` + # should be quoted. Because stdlib did not quote it before I did not + # want to introduce quoting there now. + for k, v, q in ( + (b"Domain", domain, True), + (b"Expires", expires, False), + (b"Max-Age", max_age, False), + (b"Secure", secure, None), + (b"HttpOnly", httponly, None), + (b"Path", path, False), + (b"SameSite", samesite, False), + ): + if q is None: + if v: + buf.append(k) + continue + + if v is None: + continue + + tmp = bytearray(k) + if not isinstance(v, (bytes, bytearray)): + v = _to_bytes(str(v), charset) + if q: + v = _cookie_quote(v) + tmp += b"=" + v + buf.append(bytes(tmp)) + + # The return value will be an incorrectly encoded latin1 header for + # consistency with the headers object. + rv = b"; ".join(buf) + rv = rv.decode("latin1") + + # Warn if the final value of the cookie is larger than the limit. If the + # cookie is too large, then it may be silently ignored by the browser, + # which can be quite hard to debug. + cookie_size = len(rv) + + if max_size and cookie_size > max_size: + value_size = len(value) + warnings.warn( + f"The {key.decode(charset)!r} cookie is too large: the value was" + f" {value_size} bytes but the" + f" header required {cookie_size - value_size} extra bytes. The final size" + f" was {cookie_size} bytes but the limit is {max_size} bytes. Browsers may" + f" silently ignore cookies larger than this.", + stacklevel=2, + ) + + return rv + + +def is_byte_range_valid( + start: t.Optional[int], stop: t.Optional[int], length: t.Optional[int] +) -> bool: + """Checks if a given byte content range is valid for the given length. + + .. versionadded:: 0.7 + """ + if (start is None) != (stop is None): + return False + elif start is None: + return length is None or length >= 0 + elif length is None: + return 0 <= start < stop # type: ignore + elif start >= stop: # type: ignore + return False + return 0 <= start < length + + +# circular dependencies +from . 
import datastructures as ds diff --git a/venv/lib/python3.8/site-packages/werkzeug/local.py b/venv/lib/python3.8/site-packages/werkzeug/local.py new file mode 100644 index 0000000..b4dee7b --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/local.py @@ -0,0 +1,677 @@ +import copy +import math +import operator +import sys +import typing as t +import warnings +from functools import partial +from functools import update_wrapper + +from .wsgi import ClosingIterator + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + +try: + from greenlet import getcurrent as _get_ident +except ImportError: + from threading import get_ident as _get_ident + + +def get_ident() -> int: + warnings.warn( + "'get_ident' is deprecated and will be removed in Werkzeug" + " 2.1. Use 'greenlet.getcurrent' or 'threading.get_ident' for" + " previous behavior.", + DeprecationWarning, + stacklevel=2, + ) + return _get_ident() # type: ignore + + +class _CannotUseContextVar(Exception): + pass + + +try: + from contextvars import ContextVar + + if "gevent" in sys.modules or "eventlet" in sys.modules: + # Both use greenlet, so first check it has patched + # ContextVars, Greenlet <0.4.17 does not. + import greenlet + + greenlet_patched = getattr(greenlet, "GREENLET_USE_CONTEXT_VARS", False) + + if not greenlet_patched: + # If Gevent is used, check it has patched ContextVars, + # <20.5 does not. + try: + from gevent.monkey import is_object_patched + except ImportError: + # Gevent isn't used, but Greenlet is and hasn't patched + raise _CannotUseContextVar() from None + else: + if is_object_patched("threading", "local") and not is_object_patched( + "contextvars", "ContextVar" + ): + raise _CannotUseContextVar() + + def __release_local__(storage: t.Any) -> None: + # Can remove when support for non-stdlib ContextVars is + # removed, see "Fake" version below. + storage.set({}) + + +except (ImportError, _CannotUseContextVar): + + class ContextVar: # type: ignore + """A fake ContextVar based on the previous greenlet/threading + ident function. Used on Python 3.6, eventlet, and old versions + of gevent. + """ + + def __init__(self, _name: str) -> None: + self.storage: t.Dict[int, t.Dict[str, t.Any]] = {} + + def get(self, default: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]: + return self.storage.get(_get_ident(), default) + + def set(self, value: t.Dict[str, t.Any]) -> None: + self.storage[_get_ident()] = value + + def __release_local__(storage: t.Any) -> None: + # Special version to ensure that the storage is cleaned up on + # release. + storage.storage.pop(_get_ident(), None) + + +def release_local(local: t.Union["Local", "LocalStack"]) -> None: + """Releases the contents of the local for the current context. + This makes it possible to use locals without a manager. + + Example:: + + >>> loc = Local() + >>> loc.foo = 42 + >>> release_local(loc) + >>> hasattr(loc, 'foo') + False + + With this function one can release :class:`Local` objects as well + as :class:`LocalStack` objects. However it is not possible to + release data held by proxies that way, one always has to retain + a reference to the underlying local object in order to be able + to release it. + + .. 
versionadded:: 0.6.1 + """ + local.__release_local__() + + +class Local: + __slots__ = ("_storage",) + + def __init__(self) -> None: + object.__setattr__(self, "_storage", ContextVar("local_storage")) + + @property + def __storage__(self) -> t.Dict[str, t.Any]: + warnings.warn( + "'__storage__' is deprecated and will be removed in Werkzeug 2.1.", + DeprecationWarning, + stacklevel=2, + ) + return self._storage.get({}) # type: ignore + + @property + def __ident_func__(self) -> t.Callable[[], int]: + warnings.warn( + "'__ident_func__' is deprecated and will be removed in" + " Werkzeug 2.1. It should not be used in Python 3.7+.", + DeprecationWarning, + stacklevel=2, + ) + return _get_ident # type: ignore + + @__ident_func__.setter + def __ident_func__(self, func: t.Callable[[], int]) -> None: + warnings.warn( + "'__ident_func__' is deprecated and will be removed in" + " Werkzeug 2.1. Setting it no longer has any effect.", + DeprecationWarning, + stacklevel=2, + ) + + def __iter__(self) -> t.Iterator[t.Tuple[int, t.Any]]: + return iter(self._storage.get({}).items()) + + def __call__(self, proxy: str) -> "LocalProxy": + """Create a proxy for a name.""" + return LocalProxy(self, proxy) + + def __release_local__(self) -> None: + __release_local__(self._storage) + + def __getattr__(self, name: str) -> t.Any: + values = self._storage.get({}) + try: + return values[name] + except KeyError: + raise AttributeError(name) from None + + def __setattr__(self, name: str, value: t.Any) -> None: + values = self._storage.get({}).copy() + values[name] = value + self._storage.set(values) + + def __delattr__(self, name: str) -> None: + values = self._storage.get({}).copy() + try: + del values[name] + self._storage.set(values) + except KeyError: + raise AttributeError(name) from None + + +class LocalStack: + """This class works similar to a :class:`Local` but keeps a stack + of objects instead. This is best explained with an example:: + + >>> ls = LocalStack() + >>> ls.push(42) + >>> ls.top + 42 + >>> ls.push(23) + >>> ls.top + 23 + >>> ls.pop() + 23 + >>> ls.top + 42 + + They can be force released by using a :class:`LocalManager` or with + the :func:`release_local` function but the correct way is to pop the + item from the stack after using. When the stack is empty it will + no longer be bound to the current context (and as such released). + + By calling the stack without arguments it returns a proxy that resolves to + the topmost item on the stack. + + .. versionadded:: 0.6.1 + """ + + def __init__(self) -> None: + self._local = Local() + + def __release_local__(self) -> None: + self._local.__release_local__() + + @property + def __ident_func__(self) -> t.Callable[[], int]: + return self._local.__ident_func__ + + @__ident_func__.setter + def __ident_func__(self, value: t.Callable[[], int]) -> None: + object.__setattr__(self._local, "__ident_func__", value) + + def __call__(self) -> "LocalProxy": + def _lookup() -> t.Any: + rv = self.top + if rv is None: + raise RuntimeError("object unbound") + return rv + + return LocalProxy(_lookup) + + def push(self, obj: t.Any) -> t.List[t.Any]: + """Pushes a new item to the stack""" + rv = getattr(self._local, "stack", []).copy() + rv.append(obj) + self._local.stack = rv + return rv # type: ignore + + def pop(self) -> t.Any: + """Removes the topmost item from the stack, will return the + old value or `None` if the stack was already empty. 
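+
+        A minimal usage sketch (an illustrative addition, not part of
+        the upstream docstring)::
+
+            ls = LocalStack()
+            ls.pop()     # None, the stack is empty
+            ls.push(42)
+            ls.pop()     # 42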
+ """ + stack = getattr(self._local, "stack", None) + if stack is None: + return None + elif len(stack) == 1: + release_local(self._local) + return stack[-1] + else: + return stack.pop() + + @property + def top(self) -> t.Any: + """The topmost item on the stack. If the stack is empty, + `None` is returned. + """ + try: + return self._local.stack[-1] + except (AttributeError, IndexError): + return None + + +class LocalManager: + """Local objects cannot manage themselves. For that you need a local + manager. You can pass a local manager multiple locals or add them + later by appending them to `manager.locals`. Every time the manager + cleans up, it will clean up all the data left in the locals for this + context. + + .. versionchanged:: 2.0 + ``ident_func`` is deprecated and will be removed in Werkzeug + 2.1. + + .. versionchanged:: 0.6.1 + The :func:`release_local` function can be used instead of a + manager. + + .. versionchanged:: 0.7 + The ``ident_func`` parameter was added. + """ + + def __init__( + self, + locals: t.Optional[t.Iterable[t.Union[Local, LocalStack]]] = None, + ident_func: None = None, + ) -> None: + if locals is None: + self.locals = [] + elif isinstance(locals, Local): + self.locals = [locals] + else: + self.locals = list(locals) + + if ident_func is not None: + warnings.warn( + "'ident_func' is deprecated and will be removed in" + " Werkzeug 2.1. Setting it no longer has any effect.", + DeprecationWarning, + stacklevel=2, + ) + + @property + def ident_func(self) -> t.Callable[[], int]: + warnings.warn( + "'ident_func' is deprecated and will be removed in Werkzeug 2.1.", + DeprecationWarning, + stacklevel=2, + ) + return _get_ident # type: ignore + + @ident_func.setter + def ident_func(self, func: t.Callable[[], int]) -> None: + warnings.warn( + "'ident_func' is deprecated and will be removedin Werkzeug" + " 2.1. Setting it no longer has any effect.", + DeprecationWarning, + stacklevel=2, + ) + + def get_ident(self) -> int: + """Return the context identifier the local objects use internally for + this context. You cannot override this method to change the behavior + but use it to link other context local objects (such as SQLAlchemy's + scoped sessions) to the Werkzeug locals. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. + + .. versionchanged:: 0.7 + You can pass a different ident function to the local manager that + will then be propagated to all the locals passed to the + constructor. + """ + warnings.warn( + "'get_ident' is deprecated and will be removed in Werkzeug 2.1.", + DeprecationWarning, + stacklevel=2, + ) + return self.ident_func() + + def cleanup(self) -> None: + """Manually clean up the data in the locals for this context. Call + this at the end of the request or use `make_middleware()`. + """ + for local in self.locals: + release_local(local) + + def make_middleware(self, app: "WSGIApplication") -> "WSGIApplication": + """Wrap a WSGI application so that cleaning up happens after + request end. + """ + + def application( + environ: "WSGIEnvironment", start_response: "StartResponse" + ) -> t.Iterable[bytes]: + return ClosingIterator(app(environ, start_response), self.cleanup) + + return application + + def middleware(self, func: "WSGIApplication") -> "WSGIApplication": + """Like `make_middleware` but for decorating functions. + + Example usage:: + + @manager.middleware + def application(environ, start_response): + ... 
+ + The difference to `make_middleware` is that the function passed + will have all the arguments copied from the inner application + (name, docstring, module). + """ + return update_wrapper(self.make_middleware(func), func) + + def __repr__(self) -> str: + return f"<{type(self).__name__} storages: {len(self.locals)}>" + + +class _ProxyLookup: + """Descriptor that handles proxied attribute lookup for + :class:`LocalProxy`. + + :param f: The built-in function this attribute is accessed through. + Instead of looking up the special method, the function call + is redone on the object. + :param fallback: Call this method if the proxy is unbound instead of + raising a :exc:`RuntimeError`. + :param class_value: Value to return when accessed from the class. + Used for ``__doc__`` so building docs still works. + """ + + __slots__ = ("bind_f", "fallback", "class_value", "name") + + def __init__( + self, + f: t.Optional[t.Callable] = None, + fallback: t.Optional[t.Callable] = None, + class_value: t.Optional[t.Any] = None, + ) -> None: + bind_f: t.Optional[t.Callable[["LocalProxy", t.Any], t.Callable]] + + if hasattr(f, "__get__"): + # A Python function, can be turned into a bound method. + + def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable: + return f.__get__(obj, type(obj)) # type: ignore + + elif f is not None: + # A C function, use partial to bind the first argument. + + def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable: + return partial(f, obj) # type: ignore + + else: + # Use getattr, which will produce a bound method. + bind_f = None + + self.bind_f = bind_f + self.fallback = fallback + self.class_value = class_value + + def __set_name__(self, owner: "LocalProxy", name: str) -> None: + self.name = name + + def __get__(self, instance: "LocalProxy", owner: t.Optional[type] = None) -> t.Any: + if instance is None: + if self.class_value is not None: + return self.class_value + + return self + + try: + obj = instance._get_current_object() + except RuntimeError: + if self.fallback is None: + raise + + return self.fallback.__get__(instance, owner) # type: ignore + + if self.bind_f is not None: + return self.bind_f(instance, obj) + + return getattr(obj, self.name) + + def __repr__(self) -> str: + return f"proxy {self.name}" + + def __call__(self, instance: "LocalProxy", *args: t.Any, **kwargs: t.Any) -> t.Any: + """Support calling unbound methods from the class. For example, + this happens with ``copy.copy``, which does + ``type(x).__copy__(x)``. ``type(x)`` can't be proxied, so it + returns the proxy type and descriptor. + """ + return self.__get__(instance, type(instance))(*args, **kwargs) + + +class _ProxyIOp(_ProxyLookup): + """Look up an augmented assignment method on a proxied object. The + method is wrapped to return the proxy instead of the object. + """ + + __slots__ = () + + def __init__( + self, f: t.Optional[t.Callable] = None, fallback: t.Optional[t.Callable] = None + ) -> None: + super().__init__(f, fallback) + + def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable: + def i_op(self: t.Any, other: t.Any) -> "LocalProxy": + f(self, other) # type: ignore + return instance + + return i_op.__get__(obj, type(obj)) # type: ignore + + self.bind_f = bind_f + + +def _l_to_r_op(op: F) -> F: + """Swap the argument order to turn an l-op into an r-op.""" + + def r_op(obj: t.Any, other: t.Any) -> t.Any: + return op(other, obj) + + return t.cast(F, r_op) + + +class LocalProxy: + """A proxy to the object bound to a :class:`Local`. 
All operations + on the proxy are forwarded to the bound object. If no object is + bound, a :exc:`RuntimeError` is raised. + + .. code-block:: python + + from werkzeug.local import Local + l = Local() + + # a proxy to whatever l.user is set to + user = l("user") + + from werkzeug.local import LocalStack + _request_stack = LocalStack() + + # a proxy to _request_stack.top + request = _request_stack() + + # a proxy to the session attribute of the request proxy + session = LocalProxy(lambda: request.session) + + ``__repr__`` and ``__class__`` are forwarded, so ``repr(x)`` and + ``isinstance(x, cls)`` will look like the proxied object. Use + ``issubclass(type(x), LocalProxy)`` to check if an object is a + proxy. + + .. code-block:: python + + repr(user) # + isinstance(user, User) # True + issubclass(type(user), LocalProxy) # True + + :param local: The :class:`Local` or callable that provides the + proxied object. + :param name: The attribute name to look up on a :class:`Local`. Not + used if a callable is given. + + .. versionchanged:: 2.0 + Updated proxied attributes and methods to reflect the current + data model. + + .. versionchanged:: 0.6.1 + The class can be instantiated with a callable. + """ + + __slots__ = ("__local", "__name", "__wrapped__") + + def __init__( + self, + local: t.Union["Local", t.Callable[[], t.Any]], + name: t.Optional[str] = None, + ) -> None: + object.__setattr__(self, "_LocalProxy__local", local) + object.__setattr__(self, "_LocalProxy__name", name) + + if callable(local) and not hasattr(local, "__release_local__"): + # "local" is a callable that is not an instance of Local or + # LocalManager: mark it as a wrapped function. + object.__setattr__(self, "__wrapped__", local) + + def _get_current_object(self) -> t.Any: + """Return the current object. This is useful if you want the real + object behind the proxy at a time for performance reasons or because + you want to pass the object into a different context. 
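+
+        A minimal usage sketch (an illustrative addition, not part of
+        the upstream docstring)::
+
+            l = Local()
+            l.user = "someone"
+            user_proxy = l("user")
+            user_proxy._get_current_object()  # 'someone'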
+ """ + if not hasattr(self.__local, "__release_local__"): # type: ignore + return self.__local() # type: ignore + + try: + return getattr(self.__local, self.__name) # type: ignore + except AttributeError: + name = self.__name # type: ignore + raise RuntimeError(f"no object bound to {name}") from None + + __doc__ = _ProxyLookup( # type: ignore + class_value=__doc__, fallback=lambda self: type(self).__doc__ + ) + # __del__ should only delete the proxy + __repr__ = _ProxyLookup( # type: ignore + repr, fallback=lambda self: f"<{type(self).__name__} unbound>" + ) + __str__ = _ProxyLookup(str) # type: ignore + __bytes__ = _ProxyLookup(bytes) + __format__ = _ProxyLookup() # type: ignore + __lt__ = _ProxyLookup(operator.lt) + __le__ = _ProxyLookup(operator.le) + __eq__ = _ProxyLookup(operator.eq) # type: ignore + __ne__ = _ProxyLookup(operator.ne) # type: ignore + __gt__ = _ProxyLookup(operator.gt) + __ge__ = _ProxyLookup(operator.ge) + __hash__ = _ProxyLookup(hash) # type: ignore + __bool__ = _ProxyLookup(bool, fallback=lambda self: False) + __getattr__ = _ProxyLookup(getattr) + # __getattribute__ triggered through __getattr__ + __setattr__ = _ProxyLookup(setattr) # type: ignore + __delattr__ = _ProxyLookup(delattr) # type: ignore + __dir__ = _ProxyLookup(dir, fallback=lambda self: []) # type: ignore + # __get__ (proxying descriptor not supported) + # __set__ (descriptor) + # __delete__ (descriptor) + # __set_name__ (descriptor) + # __objclass__ (descriptor) + # __slots__ used by proxy itself + # __dict__ (__getattr__) + # __weakref__ (__getattr__) + # __init_subclass__ (proxying metaclass not supported) + # __prepare__ (metaclass) + __class__ = _ProxyLookup(fallback=lambda self: type(self)) # type: ignore + __instancecheck__ = _ProxyLookup(lambda self, other: isinstance(other, self)) + __subclasscheck__ = _ProxyLookup(lambda self, other: issubclass(other, self)) + # __class_getitem__ triggered through __getitem__ + __call__ = _ProxyLookup(lambda self, *args, **kwargs: self(*args, **kwargs)) + __len__ = _ProxyLookup(len) + __length_hint__ = _ProxyLookup(operator.length_hint) + __getitem__ = _ProxyLookup(operator.getitem) + __setitem__ = _ProxyLookup(operator.setitem) + __delitem__ = _ProxyLookup(operator.delitem) + # __missing__ triggered through __getitem__ + __iter__ = _ProxyLookup(iter) + __next__ = _ProxyLookup(next) + __reversed__ = _ProxyLookup(reversed) + __contains__ = _ProxyLookup(operator.contains) + __add__ = _ProxyLookup(operator.add) + __sub__ = _ProxyLookup(operator.sub) + __mul__ = _ProxyLookup(operator.mul) + __matmul__ = _ProxyLookup(operator.matmul) + __truediv__ = _ProxyLookup(operator.truediv) + __floordiv__ = _ProxyLookup(operator.floordiv) + __mod__ = _ProxyLookup(operator.mod) + __divmod__ = _ProxyLookup(divmod) + __pow__ = _ProxyLookup(pow) + __lshift__ = _ProxyLookup(operator.lshift) + __rshift__ = _ProxyLookup(operator.rshift) + __and__ = _ProxyLookup(operator.and_) + __xor__ = _ProxyLookup(operator.xor) + __or__ = _ProxyLookup(operator.or_) + __radd__ = _ProxyLookup(_l_to_r_op(operator.add)) + __rsub__ = _ProxyLookup(_l_to_r_op(operator.sub)) + __rmul__ = _ProxyLookup(_l_to_r_op(operator.mul)) + __rmatmul__ = _ProxyLookup(_l_to_r_op(operator.matmul)) + __rtruediv__ = _ProxyLookup(_l_to_r_op(operator.truediv)) + __rfloordiv__ = _ProxyLookup(_l_to_r_op(operator.floordiv)) + __rmod__ = _ProxyLookup(_l_to_r_op(operator.mod)) + __rdivmod__ = _ProxyLookup(_l_to_r_op(divmod)) + __rpow__ = _ProxyLookup(_l_to_r_op(pow)) + __rlshift__ = _ProxyLookup(_l_to_r_op(operator.lshift)) 
+ __rrshift__ = _ProxyLookup(_l_to_r_op(operator.rshift)) + __rand__ = _ProxyLookup(_l_to_r_op(operator.and_)) + __rxor__ = _ProxyLookup(_l_to_r_op(operator.xor)) + __ror__ = _ProxyLookup(_l_to_r_op(operator.or_)) + __iadd__ = _ProxyIOp(operator.iadd) + __isub__ = _ProxyIOp(operator.isub) + __imul__ = _ProxyIOp(operator.imul) + __imatmul__ = _ProxyIOp(operator.imatmul) + __itruediv__ = _ProxyIOp(operator.itruediv) + __ifloordiv__ = _ProxyIOp(operator.ifloordiv) + __imod__ = _ProxyIOp(operator.imod) + __ipow__ = _ProxyIOp(operator.ipow) + __ilshift__ = _ProxyIOp(operator.ilshift) + __irshift__ = _ProxyIOp(operator.irshift) + __iand__ = _ProxyIOp(operator.iand) + __ixor__ = _ProxyIOp(operator.ixor) + __ior__ = _ProxyIOp(operator.ior) + __neg__ = _ProxyLookup(operator.neg) + __pos__ = _ProxyLookup(operator.pos) + __abs__ = _ProxyLookup(abs) + __invert__ = _ProxyLookup(operator.invert) + __complex__ = _ProxyLookup(complex) + __int__ = _ProxyLookup(int) + __float__ = _ProxyLookup(float) + __index__ = _ProxyLookup(operator.index) + __round__ = _ProxyLookup(round) + __trunc__ = _ProxyLookup(math.trunc) + __floor__ = _ProxyLookup(math.floor) + __ceil__ = _ProxyLookup(math.ceil) + __enter__ = _ProxyLookup() + __exit__ = _ProxyLookup() + __await__ = _ProxyLookup() + __aiter__ = _ProxyLookup() + __anext__ = _ProxyLookup() + __aenter__ = _ProxyLookup() + __aexit__ = _ProxyLookup() + __copy__ = _ProxyLookup(copy.copy) + __deepcopy__ = _ProxyLookup(copy.deepcopy) + # __getnewargs_ex__ (pickle through proxy not supported) + # __getnewargs__ (pickle) + # __getstate__ (pickle) + # __setstate__ (pickle) + # __reduce__ (pickle) + # __reduce_ex__ (pickle) diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/__init__.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/__init__.py new file mode 100644 index 0000000..6ddcf7f --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/middleware/__init__.py @@ -0,0 +1,22 @@ +""" +Middleware +========== + +A WSGI middleware is a WSGI application that wraps another application +in order to observe or change its behavior. Werkzeug provides some +middleware for common use cases. + +.. toctree:: + :maxdepth: 1 + + proxy_fix + shared_data + dispatcher + http_proxy + lint + profiler + +The :doc:`interactive debugger ` is also a middleware that can +be applied manually, although it is typically used automatically with +the :doc:`development server `. +""" diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/dispatcher.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/dispatcher.py new file mode 100644 index 0000000..ace1c75 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/middleware/dispatcher.py @@ -0,0 +1,78 @@ +""" +Application Dispatcher +====================== + +This middleware creates a single WSGI application that dispatches to +multiple other WSGI applications mounted at different URL paths. + +A common example is writing a Single Page Application, where you have a +backend API and a frontend written in JavaScript that does the routing +in the browser rather than requesting different pages from the server. +The frontend is a single HTML and JS file that should be served for any +path besides "/api". 
+ +This example dispatches to an API app under "/api", an admin app +under "/admin", and an app that serves frontend files for all other +requests:: + + app = DispatcherMiddleware(serve_frontend, { + '/api': api_app, + '/admin': admin_app, + }) + +In production, you might instead handle this at the HTTP server level, +serving files or proxying to application servers based on location. The +API and admin apps would each be deployed with a separate WSGI server, +and the static files would be served directly by the HTTP server. + +.. autoclass:: DispatcherMiddleware + +:copyright: 2007 Pallets +:license: BSD-3-Clause +""" +import typing as t + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class DispatcherMiddleware: + """Combine multiple applications as a single WSGI application. + Requests are dispatched to an application based on the path it is + mounted under. + + :param app: The WSGI application to dispatch to if the request + doesn't match a mounted path. + :param mounts: Maps path prefixes to applications for dispatching. + """ + + def __init__( + self, + app: "WSGIApplication", + mounts: t.Optional[t.Dict[str, "WSGIApplication"]] = None, + ) -> None: + self.app = app + self.mounts = mounts or {} + + def __call__( + self, environ: "WSGIEnvironment", start_response: "StartResponse" + ) -> t.Iterable[bytes]: + script = environ.get("PATH_INFO", "") + path_info = "" + + while "/" in script: + if script in self.mounts: + app = self.mounts[script] + break + + script, last_item = script.rsplit("/", 1) + path_info = f"/{last_item}{path_info}" + else: + app = self.mounts.get(script, self.app) + + original_script_name = environ.get("SCRIPT_NAME", "") + environ["SCRIPT_NAME"] = original_script_name + script + environ["PATH_INFO"] = path_info + return app(environ, start_response) diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/http_proxy.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/http_proxy.py new file mode 100644 index 0000000..1cde458 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/middleware/http_proxy.py @@ -0,0 +1,230 @@ +""" +Basic HTTP Proxy +================ + +.. autoclass:: ProxyMiddleware + +:copyright: 2007 Pallets +:license: BSD-3-Clause +""" +import typing as t +from http import client + +from ..datastructures import EnvironHeaders +from ..http import is_hop_by_hop_header +from ..urls import url_parse +from ..urls import url_quote +from ..wsgi import get_input_stream + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class ProxyMiddleware: + """Proxy requests under a path to an external server, routing other + requests to the app. + + This middleware can only proxy HTTP requests, as HTTP is the only + protocol handled by the WSGI server. Other protocols, such as + WebSocket requests, cannot be proxied at this layer. This should + only be used for development, in production a real proxy server + should be used. + + The middleware takes a dict mapping a path prefix to a dict + describing the host to be proxied to:: + + app = ProxyMiddleware(app, { + "/static/": { + "target": "http://127.0.0.1:5001/", + } + }) + + Each host has the following options: + + ``target``: + The target URL to dispatch to. This is required. + ``remove_prefix``: + Whether to remove the prefix from the URL before dispatching it + to the target. 
The default is ``False``. + ``host``: + ``""`` (default): + The host header is automatically rewritten to the URL of the + target. + ``None``: + The host header is unmodified from the client request. + Any other value: + The host header is overwritten with the value. + ``headers``: + A dictionary of headers to be sent with the request to the + target. The default is ``{}``. + ``ssl_context``: + A :class:`ssl.SSLContext` defining how to verify requests if the + target is HTTPS. The default is ``None``. + + In the example above, everything under ``"/static/"`` is proxied to + the server on port 5001. The host header is rewritten to the target, + and the ``"/static/"`` prefix is removed from the URLs. + + :param app: The WSGI application to wrap. + :param targets: Proxy target configurations. See description above. + :param chunk_size: Size of chunks to read from input stream and + write to target. + :param timeout: Seconds before an operation to a target fails. + + .. versionadded:: 0.14 + """ + + def __init__( + self, + app: "WSGIApplication", + targets: t.Mapping[str, t.Dict[str, t.Any]], + chunk_size: int = 2 << 13, + timeout: int = 10, + ) -> None: + def _set_defaults(opts: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]: + opts.setdefault("remove_prefix", False) + opts.setdefault("host", "") + opts.setdefault("headers", {}) + opts.setdefault("ssl_context", None) + return opts + + self.app = app + self.targets = { + f"/{k.strip('/')}/": _set_defaults(v) for k, v in targets.items() + } + self.chunk_size = chunk_size + self.timeout = timeout + + def proxy_to( + self, opts: t.Dict[str, t.Any], path: str, prefix: str + ) -> "WSGIApplication": + target = url_parse(opts["target"]) + host = t.cast(str, target.ascii_host) + + def application( + environ: "WSGIEnvironment", start_response: "StartResponse" + ) -> t.Iterable[bytes]: + headers = list(EnvironHeaders(environ).items()) + headers[:] = [ + (k, v) + for k, v in headers + if not is_hop_by_hop_header(k) + and k.lower() not in ("content-length", "host") + ] + headers.append(("Connection", "close")) + + if opts["host"] == "": + headers.append(("Host", host)) + elif opts["host"] is None: + headers.append(("Host", environ["HTTP_HOST"])) + else: + headers.append(("Host", opts["host"])) + + headers.extend(opts["headers"].items()) + remote_path = path + + if opts["remove_prefix"]: + remote_path = remote_path[len(prefix) :].lstrip("/") + remote_path = f"{target.path.rstrip('/')}/{remote_path}" + + content_length = environ.get("CONTENT_LENGTH") + chunked = False + + if content_length not in ("", None): + headers.append(("Content-Length", content_length)) # type: ignore + elif content_length is not None: + headers.append(("Transfer-Encoding", "chunked")) + chunked = True + + try: + if target.scheme == "http": + con = client.HTTPConnection( + host, target.port or 80, timeout=self.timeout + ) + elif target.scheme == "https": + con = client.HTTPSConnection( + host, + target.port or 443, + timeout=self.timeout, + context=opts["ssl_context"], + ) + else: + raise RuntimeError( + "Target scheme must be 'http' or 'https', got" + f" {target.scheme!r}." 
+ ) + + con.connect() + remote_url = url_quote(remote_path) + querystring = environ["QUERY_STRING"] + + if querystring: + remote_url = f"{remote_url}?{querystring}" + + con.putrequest(environ["REQUEST_METHOD"], remote_url, skip_host=True) + + for k, v in headers: + if k.lower() == "connection": + v = "close" + + con.putheader(k, v) + + con.endheaders() + stream = get_input_stream(environ) + + while True: + data = stream.read(self.chunk_size) + + if not data: + break + + if chunked: + con.send(b"%x\r\n%s\r\n" % (len(data), data)) + else: + con.send(data) + + resp = con.getresponse() + except OSError: + from ..exceptions import BadGateway + + return BadGateway()(environ, start_response) + + start_response( + f"{resp.status} {resp.reason}", + [ + (k.title(), v) + for k, v in resp.getheaders() + if not is_hop_by_hop_header(k) + ], + ) + + def read() -> t.Iterator[bytes]: + while True: + try: + data = resp.read(self.chunk_size) + except OSError: + break + + if not data: + break + + yield data + + return read() + + return application + + def __call__( + self, environ: "WSGIEnvironment", start_response: "StartResponse" + ) -> t.Iterable[bytes]: + path = environ["PATH_INFO"] + app = self.app + + for prefix, opts in self.targets.items(): + if path.startswith(prefix): + app = self.proxy_to(opts, path, prefix) + break + + return app(environ, start_response) diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/lint.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/lint.py new file mode 100644 index 0000000..c74703b --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/middleware/lint.py @@ -0,0 +1,420 @@ +""" +WSGI Protocol Linter +==================== + +This module provides a middleware that performs sanity checks on the +behavior of the WSGI server and application. It checks that the +:pep:`3333` WSGI spec is properly implemented. It also warns on some +common HTTP errors such as non-empty responses for 304 status codes. + +.. autoclass:: LintMiddleware + +:copyright: 2007 Pallets +:license: BSD-3-Clause +""" +import typing as t +from types import TracebackType +from urllib.parse import urlparse +from warnings import warn + +from ..datastructures import Headers +from ..http import is_entity_header +from ..wsgi import FileWrapper + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class WSGIWarning(Warning): + """Warning class for WSGI warnings.""" + + +class HTTPWarning(Warning): + """Warning class for HTTP warnings.""" + + +def check_type(context: str, obj: object, need: t.Type = str) -> None: + if type(obj) is not need: + warn( + f"{context!r} requires {need.__name__!r}, got {type(obj).__name__!r}.", + WSGIWarning, + stacklevel=3, + ) + + +class InputStream: + def __init__(self, stream: t.IO[bytes]) -> None: + self._stream = stream + + def read(self, *args: t.Any) -> bytes: + if len(args) == 0: + warn( + "WSGI does not guarantee an EOF marker on the input stream, thus making" + " calls to 'wsgi.input.read()' unsafe. Conforming servers may never" + " return from this call.", + WSGIWarning, + stacklevel=2, + ) + elif len(args) != 1: + warn( + "Too many parameters passed to 'wsgi.input.read()'.", + WSGIWarning, + stacklevel=2, + ) + return self._stream.read(*args) + + def readline(self, *args: t.Any) -> bytes: + if len(args) == 0: + warn( + "Calls to 'wsgi.input.readline()' without arguments are unsafe. 
Use" + " 'wsgi.input.read()' instead.", + WSGIWarning, + stacklevel=2, + ) + elif len(args) == 1: + warn( + "'wsgi.input.readline()' was called with a size hint. WSGI does not" + " support this, although it's available on all major servers.", + WSGIWarning, + stacklevel=2, + ) + else: + raise TypeError("Too many arguments passed to 'wsgi.input.readline()'.") + return self._stream.readline(*args) + + def __iter__(self) -> t.Iterator[bytes]: + try: + return iter(self._stream) + except TypeError: + warn("'wsgi.input' is not iterable.", WSGIWarning, stacklevel=2) + return iter(()) + + def close(self) -> None: + warn("The application closed the input stream!", WSGIWarning, stacklevel=2) + self._stream.close() + + +class ErrorStream: + def __init__(self, stream: t.IO[str]) -> None: + self._stream = stream + + def write(self, s: str) -> None: + check_type("wsgi.error.write()", s, str) + self._stream.write(s) + + def flush(self) -> None: + self._stream.flush() + + def writelines(self, seq: t.Iterable[str]) -> None: + for line in seq: + self.write(line) + + def close(self) -> None: + warn("The application closed the error stream!", WSGIWarning, stacklevel=2) + self._stream.close() + + +class GuardedWrite: + def __init__(self, write: t.Callable[[bytes], None], chunks: t.List[int]) -> None: + self._write = write + self._chunks = chunks + + def __call__(self, s: bytes) -> None: + check_type("write()", s, bytes) + self._write(s) + self._chunks.append(len(s)) + + +class GuardedIterator: + def __init__( + self, + iterator: t.Iterable[bytes], + headers_set: t.Tuple[int, Headers], + chunks: t.List[int], + ) -> None: + self._iterator = iterator + self._next = iter(iterator).__next__ + self.closed = False + self.headers_set = headers_set + self.chunks = chunks + + def __iter__(self) -> "GuardedIterator": + return self + + def __next__(self) -> bytes: + if self.closed: + warn("Iterated over closed 'app_iter'.", WSGIWarning, stacklevel=2) + + rv = self._next() + + if not self.headers_set: + warn( + "The application returned before it started the response.", + WSGIWarning, + stacklevel=2, + ) + + check_type("application iterator items", rv, bytes) + self.chunks.append(len(rv)) + return rv + + def close(self) -> None: + self.closed = True + + if hasattr(self._iterator, "close"): + self._iterator.close() # type: ignore + + if self.headers_set: + status_code, headers = self.headers_set + bytes_sent = sum(self.chunks) + content_length = headers.get("content-length", type=int) + + if status_code == 304: + for key, _value in headers: + key = key.lower() + if key not in ("expires", "content-location") and is_entity_header( + key + ): + warn( + f"Entity header {key!r} found in 304 response.", HTTPWarning + ) + if bytes_sent: + warn("304 responses must not have a body.", HTTPWarning) + elif 100 <= status_code < 200 or status_code == 204: + if content_length != 0: + warn( + f"{status_code} responses must have an empty content length.", + HTTPWarning, + ) + if bytes_sent: + warn(f"{status_code} responses must not have a body.", HTTPWarning) + elif content_length is not None and content_length != bytes_sent: + warn( + "Content-Length and the number of bytes sent to the" + " client do not match.", + WSGIWarning, + ) + + def __del__(self) -> None: + if not self.closed: + try: + warn( + "Iterator was garbage collected before it was closed.", WSGIWarning + ) + except Exception: + pass + + +class LintMiddleware: + """Warns about common errors in the WSGI and HTTP behavior of the + server and wrapped application. 
Some of the issues it checks are: + + - invalid status codes + - non-bytes sent to the WSGI server + - strings returned from the WSGI application + - non-empty conditional responses + - unquoted etags + - relative URLs in the Location header + - unsafe calls to wsgi.input + - unclosed iterators + + Error information is emitted using the :mod:`warnings` module. + + :param app: The WSGI application to wrap. + + .. code-block:: python + + from werkzeug.middleware.lint import LintMiddleware + app = LintMiddleware(app) + """ + + def __init__(self, app: "WSGIApplication") -> None: + self.app = app + + def check_environ(self, environ: "WSGIEnvironment") -> None: + if type(environ) is not dict: + warn( + "WSGI environment is not a standard Python dict.", + WSGIWarning, + stacklevel=4, + ) + for key in ( + "REQUEST_METHOD", + "SERVER_NAME", + "SERVER_PORT", + "wsgi.version", + "wsgi.input", + "wsgi.errors", + "wsgi.multithread", + "wsgi.multiprocess", + "wsgi.run_once", + ): + if key not in environ: + warn( + f"Required environment key {key!r} not found", + WSGIWarning, + stacklevel=3, + ) + if environ["wsgi.version"] != (1, 0): + warn("Environ is not a WSGI 1.0 environ.", WSGIWarning, stacklevel=3) + + script_name = environ.get("SCRIPT_NAME", "") + path_info = environ.get("PATH_INFO", "") + + if script_name and script_name[0] != "/": + warn( + f"'SCRIPT_NAME' does not start with a slash: {script_name!r}", + WSGIWarning, + stacklevel=3, + ) + + if path_info and path_info[0] != "/": + warn( + f"'PATH_INFO' does not start with a slash: {path_info!r}", + WSGIWarning, + stacklevel=3, + ) + + def check_start_response( + self, + status: str, + headers: t.List[t.Tuple[str, str]], + exc_info: t.Optional[ + t.Tuple[t.Type[BaseException], BaseException, TracebackType] + ], + ) -> t.Tuple[int, Headers]: + check_type("status", status, str) + status_code_str = status.split(None, 1)[0] + + if len(status_code_str) != 3 or not status_code_str.isdigit(): + warn("Status code must be three digits.", WSGIWarning, stacklevel=3) + + if len(status) < 4 or status[3] != " ": + warn( + f"Invalid value for status {status!r}. 
Valid status strings are three" + " digits, a space and a status explanation.", + WSGIWarning, + stacklevel=3, + ) + + status_code = int(status_code_str) + + if status_code < 100: + warn("Status code < 100 detected.", WSGIWarning, stacklevel=3) + + if type(headers) is not list: + warn("Header list is not a list.", WSGIWarning, stacklevel=3) + + for item in headers: + if type(item) is not tuple or len(item) != 2: + warn("Header items must be 2-item tuples.", WSGIWarning, stacklevel=3) + name, value = item + if type(name) is not str or type(value) is not str: + warn( + "Header keys and values must be strings.", WSGIWarning, stacklevel=3 + ) + if name.lower() == "status": + warn( + "The status header is not supported due to" + " conflicts with the CGI spec.", + WSGIWarning, + stacklevel=3, + ) + + if exc_info is not None and not isinstance(exc_info, tuple): + warn("Invalid value for exc_info.", WSGIWarning, stacklevel=3) + + headers = Headers(headers) + self.check_headers(headers) + + return status_code, headers + + def check_headers(self, headers: Headers) -> None: + etag = headers.get("etag") + + if etag is not None: + if etag.startswith(("W/", "w/")): + if etag.startswith("w/"): + warn( + "Weak etag indicator should be upper case.", + HTTPWarning, + stacklevel=4, + ) + + etag = etag[2:] + + if not (etag[:1] == etag[-1:] == '"'): + warn("Unquoted etag emitted.", HTTPWarning, stacklevel=4) + + location = headers.get("location") + + if location is not None: + if not urlparse(location).netloc: + warn( + "Absolute URLs required for location header.", + HTTPWarning, + stacklevel=4, + ) + + def check_iterator(self, app_iter: t.Iterable[bytes]) -> None: + if isinstance(app_iter, bytes): + warn( + "The application returned a bytestring. The response will send one" + " character at a time to the client, which will kill performance." + " Return a list or iterable instead.", + WSGIWarning, + stacklevel=3, + ) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Iterable[bytes]: + if len(args) != 2: + warn("A WSGI app takes two arguments.", WSGIWarning, stacklevel=2) + + if kwargs: + warn( + "A WSGI app does not take keyword arguments.", WSGIWarning, stacklevel=2 + ) + + environ: "WSGIEnvironment" = args[0] + start_response: "StartResponse" = args[1] + + self.check_environ(environ) + environ["wsgi.input"] = InputStream(environ["wsgi.input"]) + environ["wsgi.errors"] = ErrorStream(environ["wsgi.errors"]) + + # Hook our own file wrapper in so that applications will always + # iterate to the end and we can check the content length. 
+ environ["wsgi.file_wrapper"] = FileWrapper + + headers_set: t.List[t.Any] = [] + chunks: t.List[int] = [] + + def checking_start_response( + *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[bytes], None]: + if len(args) not in {2, 3}: + warn( + f"Invalid number of arguments: {len(args)}, expected 2 or 3.", + WSGIWarning, + stacklevel=2, + ) + + if kwargs: + warn("'start_response' does not take keyword arguments.", WSGIWarning) + + status: str = args[0] + headers: t.List[t.Tuple[str, str]] = args[1] + exc_info: t.Optional[ + t.Tuple[t.Type[BaseException], BaseException, TracebackType] + ] = (args[2] if len(args) == 3 else None) + + headers_set[:] = self.check_start_response(status, headers, exc_info) + return GuardedWrite(start_response(status, headers, exc_info), chunks) + + app_iter = self.app(environ, t.cast("StartResponse", checking_start_response)) + self.check_iterator(app_iter) + return GuardedIterator( + app_iter, t.cast(t.Tuple[int, Headers], headers_set), chunks + ) diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/profiler.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/profiler.py new file mode 100644 index 0000000..200dae0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/middleware/profiler.py @@ -0,0 +1,139 @@ +""" +Application Profiler +==================== + +This module provides a middleware that profiles each request with the +:mod:`cProfile` module. This can help identify bottlenecks in your code +that may be slowing down your application. + +.. autoclass:: ProfilerMiddleware + +:copyright: 2007 Pallets +:license: BSD-3-Clause +""" +import os.path +import sys +import time +import typing as t +from pstats import Stats + +try: + from cProfile import Profile +except ImportError: + from profile import Profile # type: ignore + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class ProfilerMiddleware: + """Wrap a WSGI application and profile the execution of each + request. Responses are buffered so that timings are more exact. + + If ``stream`` is given, :class:`pstats.Stats` are written to it + after each request. If ``profile_dir`` is given, :mod:`cProfile` + data files are saved to that directory, one file per request. + + The filename can be customized by passing ``filename_format``. If + it is a string, it will be formatted using :meth:`str.format` with + the following fields available: + + - ``{method}`` - The request method; GET, POST, etc. + - ``{path}`` - The request path or 'root' should one not exist. + - ``{elapsed}`` - The elapsed time of the request. + - ``{time}`` - The time of the request. + + If it is a callable, it will be called with the WSGI ``environ`` + dict and should return a filename. + + :param app: The WSGI application to wrap. + :param stream: Write stats to this stream. Disable with ``None``. + :param sort_by: A tuple of columns to sort stats by. See + :meth:`pstats.Stats.sort_stats`. + :param restrictions: A tuple of restrictions to filter stats by. See + :meth:`pstats.Stats.print_stats`. + :param profile_dir: Save profile data files to this directory. + :param filename_format: Format string for profile data file names, + or a callable returning a name. See explanation above. + + .. code-block:: python + + from werkzeug.middleware.profiler import ProfilerMiddleware + app = ProfilerMiddleware(app) + + .. 
versionchanged:: 0.15 + Stats are written even if ``profile_dir`` is given, and can be + disable by passing ``stream=None``. + + .. versionadded:: 0.15 + Added ``filename_format``. + + .. versionadded:: 0.9 + Added ``restrictions`` and ``profile_dir``. + """ + + def __init__( + self, + app: "WSGIApplication", + stream: t.IO[str] = sys.stdout, + sort_by: t.Iterable[str] = ("time", "calls"), + restrictions: t.Iterable[t.Union[str, int, float]] = (), + profile_dir: t.Optional[str] = None, + filename_format: str = "{method}.{path}.{elapsed:.0f}ms.{time:.0f}.prof", + ) -> None: + self._app = app + self._stream = stream + self._sort_by = sort_by + self._restrictions = restrictions + self._profile_dir = profile_dir + self._filename_format = filename_format + + def __call__( + self, environ: "WSGIEnvironment", start_response: "StartResponse" + ) -> t.Iterable[bytes]: + response_body: t.List[bytes] = [] + + def catching_start_response(status, headers, exc_info=None): # type: ignore + start_response(status, headers, exc_info) + return response_body.append + + def runapp() -> None: + app_iter = self._app( + environ, t.cast("StartResponse", catching_start_response) + ) + response_body.extend(app_iter) + + if hasattr(app_iter, "close"): + app_iter.close() # type: ignore + + profile = Profile() + start = time.time() + profile.runcall(runapp) + body = b"".join(response_body) + elapsed = time.time() - start + + if self._profile_dir is not None: + if callable(self._filename_format): + filename = self._filename_format(environ) + else: + filename = self._filename_format.format( + method=environ["REQUEST_METHOD"], + path=environ["PATH_INFO"].strip("/").replace("/", ".") or "root", + elapsed=elapsed * 1000.0, + time=time.time(), + ) + filename = os.path.join(self._profile_dir, filename) + profile.dump_stats(filename) + + if self._stream is not None: + stats = Stats(profile, stream=self._stream) + stats.sort_stats(*self._sort_by) + print("-" * 80, file=self._stream) + path_info = environ.get("PATH_INFO", "") + print(f"PATH: {path_info!r}", file=self._stream) + stats.print_stats(*self._restrictions) + print(f"{'-' * 80}\n", file=self._stream) + + return [body] diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/proxy_fix.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/proxy_fix.py new file mode 100644 index 0000000..e90b1b3 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/middleware/proxy_fix.py @@ -0,0 +1,187 @@ +""" +X-Forwarded-For Proxy Fix +========================= + +This module provides a middleware that adjusts the WSGI environ based on +``X-Forwarded-`` headers that proxies in front of an application may +set. + +When an application is running behind a proxy server, WSGI may see the +request as coming from that server rather than the real client. Proxies +set various headers to track where the request actually came from. + +This middleware should only be used if the application is actually +behind such a proxy, and should be configured with the number of proxies +that are chained in front of it. Not all proxies set all the headers. +Since incoming headers can be faked, you must set how many proxies are +setting each header so the middleware knows what to trust. + +.. 
autoclass:: ProxyFix + +:copyright: 2007 Pallets +:license: BSD-3-Clause +""" +import typing as t + +from ..http import parse_list_header + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class ProxyFix: + """Adjust the WSGI environ based on ``X-Forwarded-`` that proxies in + front of the application may set. + + - ``X-Forwarded-For`` sets ``REMOTE_ADDR``. + - ``X-Forwarded-Proto`` sets ``wsgi.url_scheme``. + - ``X-Forwarded-Host`` sets ``HTTP_HOST``, ``SERVER_NAME``, and + ``SERVER_PORT``. + - ``X-Forwarded-Port`` sets ``HTTP_HOST`` and ``SERVER_PORT``. + - ``X-Forwarded-Prefix`` sets ``SCRIPT_NAME``. + + You must tell the middleware how many proxies set each header so it + knows what values to trust. It is a security issue to trust values + that came from the client rather than a proxy. + + The original values of the headers are stored in the WSGI + environ as ``werkzeug.proxy_fix.orig``, a dict. + + :param app: The WSGI application to wrap. + :param x_for: Number of values to trust for ``X-Forwarded-For``. + :param x_proto: Number of values to trust for ``X-Forwarded-Proto``. + :param x_host: Number of values to trust for ``X-Forwarded-Host``. + :param x_port: Number of values to trust for ``X-Forwarded-Port``. + :param x_prefix: Number of values to trust for + ``X-Forwarded-Prefix``. + + .. code-block:: python + + from werkzeug.middleware.proxy_fix import ProxyFix + # App is behind one proxy that sets the -For and -Host headers. + app = ProxyFix(app, x_for=1, x_host=1) + + .. versionchanged:: 1.0 + Deprecated code has been removed: + + * The ``num_proxies`` argument and attribute. + * The ``get_remote_addr`` method. + * The environ keys ``orig_remote_addr``, + ``orig_wsgi_url_scheme``, and ``orig_http_host``. + + .. versionchanged:: 0.15 + All headers support multiple values. The ``num_proxies`` + argument is deprecated. Each header is configured with a + separate number of trusted proxies. + + .. versionchanged:: 0.15 + Original WSGI environ values are stored in the + ``werkzeug.proxy_fix.orig`` dict. ``orig_remote_addr``, + ``orig_wsgi_url_scheme``, and ``orig_http_host`` are deprecated + and will be removed in 1.0. + + .. versionchanged:: 0.15 + Support ``X-Forwarded-Port`` and ``X-Forwarded-Prefix``. + + .. versionchanged:: 0.15 + ``X-Forwarded-Host`` and ``X-Forwarded-Port`` modify + ``SERVER_NAME`` and ``SERVER_PORT``. + """ + + def __init__( + self, + app: "WSGIApplication", + x_for: int = 1, + x_proto: int = 1, + x_host: int = 0, + x_port: int = 0, + x_prefix: int = 0, + ) -> None: + self.app = app + self.x_for = x_for + self.x_proto = x_proto + self.x_host = x_host + self.x_port = x_port + self.x_prefix = x_prefix + + def _get_real_value(self, trusted: int, value: t.Optional[str]) -> t.Optional[str]: + """Get the real value from a list header based on the configured + number of trusted proxies. + + :param trusted: Number of values to trust in the header. + :param value: Comma separated list header value to parse. + :return: The real value, or ``None`` if there are fewer values + than the number of trusted proxies. + + .. versionchanged:: 1.0 + Renamed from ``_get_trusted_comma``. + + .. 
versionadded:: 0.15 + """ + if not (trusted and value): + return None + values = parse_list_header(value) + if len(values) >= trusted: + return values[-trusted] + return None + + def __call__( + self, environ: "WSGIEnvironment", start_response: "StartResponse" + ) -> t.Iterable[bytes]: + """Modify the WSGI environ based on the various ``Forwarded`` + headers before calling the wrapped application. Store the + original environ values in ``werkzeug.proxy_fix.orig_{key}``. + """ + environ_get = environ.get + orig_remote_addr = environ_get("REMOTE_ADDR") + orig_wsgi_url_scheme = environ_get("wsgi.url_scheme") + orig_http_host = environ_get("HTTP_HOST") + environ.update( + { + "werkzeug.proxy_fix.orig": { + "REMOTE_ADDR": orig_remote_addr, + "wsgi.url_scheme": orig_wsgi_url_scheme, + "HTTP_HOST": orig_http_host, + "SERVER_NAME": environ_get("SERVER_NAME"), + "SERVER_PORT": environ_get("SERVER_PORT"), + "SCRIPT_NAME": environ_get("SCRIPT_NAME"), + } + } + ) + + x_for = self._get_real_value(self.x_for, environ_get("HTTP_X_FORWARDED_FOR")) + if x_for: + environ["REMOTE_ADDR"] = x_for + + x_proto = self._get_real_value( + self.x_proto, environ_get("HTTP_X_FORWARDED_PROTO") + ) + if x_proto: + environ["wsgi.url_scheme"] = x_proto + + x_host = self._get_real_value(self.x_host, environ_get("HTTP_X_FORWARDED_HOST")) + if x_host: + environ["HTTP_HOST"] = x_host + parts = x_host.split(":", 1) + environ["SERVER_NAME"] = parts[0] + if len(parts) == 2: + environ["SERVER_PORT"] = parts[1] + + x_port = self._get_real_value(self.x_port, environ_get("HTTP_X_FORWARDED_PORT")) + if x_port: + host = environ.get("HTTP_HOST") + if host: + parts = host.split(":", 1) + host = parts[0] if len(parts) == 2 else host + environ["HTTP_HOST"] = f"{host}:{x_port}" + environ["SERVER_PORT"] = x_port + + x_prefix = self._get_real_value( + self.x_prefix, environ_get("HTTP_X_FORWARDED_PREFIX") + ) + if x_prefix: + environ["SCRIPT_NAME"] = x_prefix + + return self.app(environ, start_response) diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/shared_data.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/shared_data.py new file mode 100644 index 0000000..62da672 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/middleware/shared_data.py @@ -0,0 +1,320 @@ +""" +Serve Shared Static Files +========================= + +.. autoclass:: SharedDataMiddleware + :members: is_allowed + +:copyright: 2007 Pallets +:license: BSD-3-Clause +""" +import mimetypes +import os +import pkgutil +import posixpath +import typing as t +from datetime import datetime +from datetime import timezone +from io import BytesIO +from time import time +from zlib import adler32 + +from ..filesystem import get_filesystem_encoding +from ..http import http_date +from ..http import is_resource_modified +from ..security import safe_join +from ..utils import get_content_type +from ..wsgi import get_path_info +from ..wsgi import wrap_file + +_TOpener = t.Callable[[], t.Tuple[t.IO[bytes], datetime, int]] +_TLoader = t.Callable[[t.Optional[str]], t.Tuple[t.Optional[str], t.Optional[_TOpener]]] + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class SharedDataMiddleware: + + """A WSGI middleware which provides static content for development + environments or simple server setups. 
Its usage is quite simple:: + + import os + from werkzeug.middleware.shared_data import SharedDataMiddleware + + app = SharedDataMiddleware(app, { + '/shared': os.path.join(os.path.dirname(__file__), 'shared') + }) + + The contents of the folder ``./shared`` will now be available on + ``http://example.com/shared/``. This is pretty useful during development + because a standalone media server is not required. Files can also be + mounted on the root folder and still continue to use the application because + the shared data middleware forwards all unhandled requests to the + application, even if the requests are below one of the shared folders. + + If `pkg_resources` is available you can also tell the middleware to serve + files from package data:: + + app = SharedDataMiddleware(app, { + '/static': ('myapplication', 'static') + }) + + This will then serve the ``static`` folder in the `myapplication` + Python package. + + The optional `disallow` parameter can be a list of :func:`~fnmatch.fnmatch` + rules for files that are not accessible from the web. If `cache` is set to + `False` no caching headers are sent. + + Currently the middleware does not support non-ASCII filenames. If the + encoding on the file system happens to match the encoding of the URI it may + work but this could also be by accident. We strongly suggest using ASCII + only file names for static files. + + The middleware will guess the mimetype using the Python `mimetype` + module. If it's unable to figure out the charset it will fall back + to `fallback_mimetype`. + + :param app: the application to wrap. If you don't want to wrap an + application you can pass it :exc:`NotFound`. + :param exports: a list or dict of exported files and folders. + :param disallow: a list of :func:`~fnmatch.fnmatch` rules. + :param cache: enable or disable caching headers. + :param cache_timeout: the cache timeout in seconds for the headers. + :param fallback_mimetype: The fallback mimetype for unknown files. + + .. versionchanged:: 1.0 + The default ``fallback_mimetype`` is + ``application/octet-stream``. If a filename looks like a text + mimetype, the ``utf-8`` charset is added to it. + + .. versionadded:: 0.6 + Added ``fallback_mimetype``. + + .. versionchanged:: 0.5 + Added ``cache_timeout``. + """ + + def __init__( + self, + app: "WSGIApplication", + exports: t.Union[ + t.Dict[str, t.Union[str, t.Tuple[str, str]]], + t.Iterable[t.Tuple[str, t.Union[str, t.Tuple[str, str]]]], + ], + disallow: None = None, + cache: bool = True, + cache_timeout: int = 60 * 60 * 12, + fallback_mimetype: str = "application/octet-stream", + ) -> None: + self.app = app + self.exports: t.List[t.Tuple[str, _TLoader]] = [] + self.cache = cache + self.cache_timeout = cache_timeout + + if isinstance(exports, dict): + exports = exports.items() + + for key, value in exports: + if isinstance(value, tuple): + loader = self.get_package_loader(*value) + elif isinstance(value, str): + if os.path.isfile(value): + loader = self.get_file_loader(value) + else: + loader = self.get_directory_loader(value) + else: + raise TypeError(f"unknown def {value!r}") + + self.exports.append((key, loader)) + + if disallow is not None: + from fnmatch import fnmatch + + self.is_allowed = lambda x: not fnmatch(x, disallow) + + self.fallback_mimetype = fallback_mimetype + + def is_allowed(self, filename: str) -> bool: + """Subclasses can override this method to disallow the access to + certain files. However by providing `disallow` in the constructor + this method is overwritten. 
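A brief sketch of the override suggested above, using a hypothetical subclass name and file pattern:

    from werkzeug.middleware.shared_data import SharedDataMiddleware

    class NoSourceMapMiddleware(SharedDataMiddleware):
        def is_allowed(self, filename: str) -> bool:
            # Refuse to serve source maps; all other files keep the default
            # behaviour of being allowed.
            return not filename.endswith(".map")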
+ """ + return True + + def _opener(self, filename: str) -> _TOpener: + return lambda: ( + open(filename, "rb"), + datetime.fromtimestamp(os.path.getmtime(filename), tz=timezone.utc), + int(os.path.getsize(filename)), + ) + + def get_file_loader(self, filename: str) -> _TLoader: + return lambda x: (os.path.basename(filename), self._opener(filename)) + + def get_package_loader(self, package: str, package_path: str) -> _TLoader: + load_time = datetime.now(timezone.utc) + provider = pkgutil.get_loader(package) + + if hasattr(provider, "get_resource_reader"): + # Python 3 + reader = provider.get_resource_reader(package) # type: ignore + + def loader( + path: t.Optional[str], + ) -> t.Tuple[t.Optional[str], t.Optional[_TOpener]]: + if path is None: + return None, None + + path = safe_join(package_path, path) + + if path is None: + return None, None + + basename = posixpath.basename(path) + + try: + resource = reader.open_resource(path) + except OSError: + return None, None + + if isinstance(resource, BytesIO): + return ( + basename, + lambda: (resource, load_time, len(resource.getvalue())), + ) + + return ( + basename, + lambda: ( + resource, + datetime.fromtimestamp( + os.path.getmtime(resource.name), tz=timezone.utc + ), + os.path.getsize(resource.name), + ), + ) + + else: + # Python 3.6 + package_filename = provider.get_filename(package) # type: ignore + is_filesystem = os.path.exists(package_filename) + root = os.path.join(os.path.dirname(package_filename), package_path) + + def loader( + path: t.Optional[str], + ) -> t.Tuple[t.Optional[str], t.Optional[_TOpener]]: + if path is None: + return None, None + + path = safe_join(root, path) + + if path is None: + return None, None + + basename = posixpath.basename(path) + + if is_filesystem: + if not os.path.isfile(path): + return None, None + + return basename, self._opener(path) + + try: + data = provider.get_data(path) # type: ignore + except OSError: + return None, None + + return basename, lambda: (BytesIO(data), load_time, len(data)) + + return loader + + def get_directory_loader(self, directory: str) -> _TLoader: + def loader( + path: t.Optional[str], + ) -> t.Tuple[t.Optional[str], t.Optional[_TOpener]]: + if path is not None: + path = safe_join(directory, path) + + if path is None: + return None, None + else: + path = directory + + if os.path.isfile(path): + return os.path.basename(path), self._opener(path) + + return None, None + + return loader + + def generate_etag(self, mtime: datetime, file_size: int, real_filename: str) -> str: + if not isinstance(real_filename, bytes): + real_filename = real_filename.encode( # type: ignore + get_filesystem_encoding() + ) + + timestamp = mtime.timestamp() + checksum = adler32(real_filename) & 0xFFFFFFFF # type: ignore + return f"wzsdm-{timestamp}-{file_size}-{checksum}" + + def __call__( + self, environ: "WSGIEnvironment", start_response: "StartResponse" + ) -> t.Iterable[bytes]: + path = get_path_info(environ) + file_loader = None + + for search_path, loader in self.exports: + if search_path == path: + real_filename, file_loader = loader(None) + + if file_loader is not None: + break + + if not search_path.endswith("/"): + search_path += "/" + + if path.startswith(search_path): + real_filename, file_loader = loader(path[len(search_path) :]) + + if file_loader is not None: + break + + if file_loader is None or not self.is_allowed(real_filename): # type: ignore + return self.app(environ, start_response) + + guessed_type = mimetypes.guess_type(real_filename) # type: ignore + mime_type = 
get_content_type(guessed_type[0] or self.fallback_mimetype, "utf-8") + f, mtime, file_size = file_loader() + + headers = [("Date", http_date())] + + if self.cache: + timeout = self.cache_timeout + etag = self.generate_etag(mtime, file_size, real_filename) # type: ignore + headers += [ + ("Etag", f'"{etag}"'), + ("Cache-Control", f"max-age={timeout}, public"), + ] + + if not is_resource_modified(environ, etag, last_modified=mtime): + f.close() + start_response("304 Not Modified", headers) + return [] + + headers.append(("Expires", http_date(time() + timeout))) + else: + headers.append(("Cache-Control", "public")) + + headers.extend( + ( + ("Content-Type", mime_type), + ("Content-Length", str(file_size)), + ("Last-Modified", http_date(mtime)), + ) + ) + start_response("200 OK", headers) + return wrap_file(environ, f) diff --git a/venv/lib/python3.8/site-packages/werkzeug/py.typed b/venv/lib/python3.8/site-packages/werkzeug/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/werkzeug/routing.py b/venv/lib/python3.8/site-packages/werkzeug/routing.py new file mode 100644 index 0000000..ddb8ef9 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/routing.py @@ -0,0 +1,2341 @@ +"""When it comes to combining multiple controller or view functions +(however you want to call them) you need a dispatcher. A simple way +would be applying regular expression tests on the ``PATH_INFO`` and +calling registered callback functions that return the value then. + +This module implements a much more powerful system than simple regular +expression matching because it can also convert values in the URLs and +build URLs. + +Here a simple example that creates a URL map for an application with +two subdomains (www and kb) and some URL rules: + +.. code-block:: python + + m = Map([ + # Static URLs + Rule('/', endpoint='static/index'), + Rule('/about', endpoint='static/about'), + Rule('/help', endpoint='static/help'), + # Knowledge Base + Subdomain('kb', [ + Rule('/', endpoint='kb/index'), + Rule('/browse/', endpoint='kb/browse'), + Rule('/browse//', endpoint='kb/browse'), + Rule('/browse//', endpoint='kb/browse') + ]) + ], default_subdomain='www') + +If the application doesn't use subdomains it's perfectly fine to not set +the default subdomain and not use the `Subdomain` rule factory. The +endpoint in the rules can be anything, for example import paths or +unique identifiers. The WSGI application can use those endpoints to get the +handler for that URL. It doesn't have to be a string at all but it's +recommended. + +Now it's possible to create a URL adapter for one of the subdomains and +build URLs: + +.. code-block:: python + + c = m.bind('example.com') + + c.build("kb/browse", dict(id=42)) + 'http://kb.example.com/browse/42/' + + c.build("kb/browse", dict()) + 'http://kb.example.com/browse/' + + c.build("kb/browse", dict(id=42, page=3)) + 'http://kb.example.com/browse/42/3' + + c.build("static/about") + '/about' + + c.build("static/index", force_external=True) + 'http://www.example.com/' + + c = m.bind('example.com', subdomain='kb') + + c.build("static/about") + 'http://www.example.com/about' + +The first argument to bind is the server name *without* the subdomain. +Per default it will assume that the script is mounted on the root, but +often that's not the case so you can provide the real mount point as +second argument: + +.. 
code-block:: python + + c = m.bind('example.com', '/applications/example') + +The third argument can be the subdomain, if not given the default +subdomain is used. For more details about binding have a look at the +documentation of the `MapAdapter`. + +And here is how you can match URLs: + +.. code-block:: python + + c = m.bind('example.com') + + c.match("/") + ('static/index', {}) + + c.match("/about") + ('static/about', {}) + + c = m.bind('example.com', '/', 'kb') + + c.match("/") + ('kb/index', {}) + + c.match("/browse/42/23") + ('kb/browse', {'id': 42, 'page': 23}) + +If matching fails you get a ``NotFound`` exception, if the rule thinks +it's a good idea to redirect (for example because the URL was defined +to have a slash at the end but the request was missing that slash) it +will raise a ``RequestRedirect`` exception. Both are subclasses of +``HTTPException`` so you can use those errors as responses in the +application. + +If matching succeeded but the URL rule was incompatible to the given +method (for example there were only rules for ``GET`` and ``HEAD`` but +routing tried to match a ``POST`` request) a ``MethodNotAllowed`` +exception is raised. +""" +import ast +import difflib +import posixpath +import re +import typing +import typing as t +import uuid +import warnings +from pprint import pformat +from string import Template +from threading import Lock +from types import CodeType + +from ._internal import _encode_idna +from ._internal import _get_environ +from ._internal import _to_bytes +from ._internal import _to_str +from ._internal import _wsgi_decoding_dance +from .datastructures import ImmutableDict +from .datastructures import MultiDict +from .exceptions import BadHost +from .exceptions import BadRequest +from .exceptions import HTTPException +from .exceptions import MethodNotAllowed +from .exceptions import NotFound +from .urls import _fast_url_quote +from .urls import url_encode +from .urls import url_join +from .urls import url_quote +from .urls import url_unquote +from .utils import cached_property +from .utils import redirect +from .wsgi import get_host + +if t.TYPE_CHECKING: + import typing_extensions as te + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + from .wrappers.response import Response + +_rule_re = re.compile( + r""" + (?P[^<]*) # static rule data + < + (?: + (?P[a-zA-Z_][a-zA-Z0-9_]*) # converter name + (?:\((?P.*?)\))? # converter arguments + \: # variable delimiter + )? + (?P[a-zA-Z_][a-zA-Z0-9_]*) # variable name + > + """, + re.VERBOSE, +) +_simple_rule_re = re.compile(r"<([^>]+)>") +_converter_args_re = re.compile( + r""" + ((?P\w+)\s*=\s*)? 
+ (?P + True|False| + \d+.\d+| + \d+.| + \d+| + [\w\d_.]+| + [urUR]?(?P"[^"]*?"|'[^']*') + )\s*, + """, + re.VERBOSE, +) + + +_PYTHON_CONSTANTS = {"None": None, "True": True, "False": False} + + +def _pythonize(value: str) -> t.Union[None, bool, int, float, str]: + if value in _PYTHON_CONSTANTS: + return _PYTHON_CONSTANTS[value] + for convert in int, float: + try: + return convert(value) # type: ignore + except ValueError: + pass + if value[:1] == value[-1:] and value[0] in "\"'": + value = value[1:-1] + return str(value) + + +def parse_converter_args(argstr: str) -> t.Tuple[t.Tuple, t.Dict[str, t.Any]]: + argstr += "," + args = [] + kwargs = {} + + for item in _converter_args_re.finditer(argstr): + value = item.group("stringval") + if value is None: + value = item.group("value") + value = _pythonize(value) + if not item.group("name"): + args.append(value) + else: + name = item.group("name") + kwargs[name] = value + + return tuple(args), kwargs + + +def parse_rule(rule: str) -> t.Iterator[t.Tuple[t.Optional[str], t.Optional[str], str]]: + """Parse a rule and return it as generator. Each iteration yields tuples + in the form ``(converter, arguments, variable)``. If the converter is + `None` it's a static url part, otherwise it's a dynamic one. + + :internal: + """ + pos = 0 + end = len(rule) + do_match = _rule_re.match + used_names = set() + while pos < end: + m = do_match(rule, pos) + if m is None: + break + data = m.groupdict() + if data["static"]: + yield None, None, data["static"] + variable = data["variable"] + converter = data["converter"] or "default" + if variable in used_names: + raise ValueError(f"variable name {variable!r} used twice.") + used_names.add(variable) + yield converter, data["args"] or None, variable + pos = m.end() + if pos < end: + remaining = rule[pos:] + if ">" in remaining or "<" in remaining: + raise ValueError(f"malformed url rule: {rule!r}") + yield None, None, remaining + + +class RoutingException(Exception): + """Special exceptions that require the application to redirect, notifying + about missing urls, etc. + + :internal: + """ + + +class RequestRedirect(HTTPException, RoutingException): + """Raise if the map requests a redirect. This is for example the case if + `strict_slashes` are activated and an url that requires a trailing slash. + + The attribute `new_url` contains the absolute destination url. + """ + + code = 308 + + def __init__(self, new_url: str) -> None: + super().__init__(new_url) + self.new_url = new_url + + def get_response( + self, + environ: t.Optional["WSGIEnvironment"] = None, + scope: t.Optional[dict] = None, + ) -> "Response": + return redirect(self.new_url, self.code) + + +class RequestPath(RoutingException): + """Internal exception.""" + + __slots__ = ("path_info",) + + def __init__(self, path_info: str) -> None: + super().__init__() + self.path_info = path_info + + +class RequestAliasRedirect(RoutingException): # noqa: B903 + """This rule is an alias and wants to redirect to the canonical URL.""" + + def __init__(self, matched_values: t.Mapping[str, t.Any]) -> None: + super().__init__() + self.matched_values = matched_values + + +class BuildError(RoutingException, LookupError): + """Raised if the build system cannot find a URL for an endpoint with the + values provided. 
+ """ + + def __init__( + self, + endpoint: str, + values: t.Mapping[str, t.Any], + method: t.Optional[str], + adapter: t.Optional["MapAdapter"] = None, + ) -> None: + super().__init__(endpoint, values, method) + self.endpoint = endpoint + self.values = values + self.method = method + self.adapter = adapter + + @cached_property + def suggested(self) -> t.Optional["Rule"]: + return self.closest_rule(self.adapter) + + def closest_rule(self, adapter: t.Optional["MapAdapter"]) -> t.Optional["Rule"]: + def _score_rule(rule: "Rule") -> float: + return sum( + [ + 0.98 + * difflib.SequenceMatcher( + None, rule.endpoint, self.endpoint + ).ratio(), + 0.01 * bool(set(self.values or ()).issubset(rule.arguments)), + 0.01 * bool(rule.methods and self.method in rule.methods), + ] + ) + + if adapter and adapter.map._rules: + return max(adapter.map._rules, key=_score_rule) + + return None + + def __str__(self) -> str: + message = [f"Could not build url for endpoint {self.endpoint!r}"] + if self.method: + message.append(f" ({self.method!r})") + if self.values: + message.append(f" with values {sorted(self.values)!r}") + message.append(".") + if self.suggested: + if self.endpoint == self.suggested.endpoint: + if ( + self.method + and self.suggested.methods is not None + and self.method not in self.suggested.methods + ): + message.append( + " Did you mean to use methods" + f" {sorted(self.suggested.methods)!r}?" + ) + missing_values = self.suggested.arguments.union( + set(self.suggested.defaults or ()) + ) - set(self.values.keys()) + if missing_values: + message.append( + f" Did you forget to specify values {sorted(missing_values)!r}?" + ) + else: + message.append(f" Did you mean {self.suggested.endpoint!r} instead?") + return "".join(message) + + +class WebsocketMismatch(BadRequest): + """The only matched rule is either a WebSocket and the request is + HTTP, or the rule is HTTP and the request is a WebSocket. + """ + + +class ValidationError(ValueError): + """Validation error. If a rule converter raises this exception the rule + does not match the current URL and the next URL is tried. + """ + + +class RuleFactory: + """As soon as you have more complex URL setups it's a good idea to use rule + factories to avoid repetitive tasks. Some of them are builtin, others can + be added by subclassing `RuleFactory` and overriding `get_rules`. + """ + + def get_rules(self, map: "Map") -> t.Iterable["Rule"]: + """Subclasses of `RuleFactory` have to override this method and return + an iterable of rules.""" + raise NotImplementedError() + + +class Subdomain(RuleFactory): + """All URLs provided by this factory have the subdomain set to a + specific domain. For example if you want to use the subdomain for + the current language this can be a good setup:: + + url_map = Map([ + Rule('/', endpoint='#select_language'), + Subdomain('', [ + Rule('/', endpoint='index'), + Rule('/about', endpoint='about'), + Rule('/help', endpoint='help') + ]) + ]) + + All the rules except for the ``'#select_language'`` endpoint will now + listen on a two letter long subdomain that holds the language code + for the current request. 
+ """ + + def __init__(self, subdomain: str, rules: t.Iterable[RuleFactory]) -> None: + self.subdomain = subdomain + self.rules = rules + + def get_rules(self, map: "Map") -> t.Iterator["Rule"]: + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + rule = rule.empty() + rule.subdomain = self.subdomain + yield rule + + +class Submount(RuleFactory): + """Like `Subdomain` but prefixes the URL rule with a given string:: + + url_map = Map([ + Rule('/', endpoint='index'), + Submount('/blog', [ + Rule('/', endpoint='blog/index'), + Rule('/entry/', endpoint='blog/show') + ]) + ]) + + Now the rule ``'blog/show'`` matches ``/blog/entry/``. + """ + + def __init__(self, path: str, rules: t.Iterable[RuleFactory]) -> None: + self.path = path.rstrip("/") + self.rules = rules + + def get_rules(self, map: "Map") -> t.Iterator["Rule"]: + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + rule = rule.empty() + rule.rule = self.path + rule.rule + yield rule + + +class EndpointPrefix(RuleFactory): + """Prefixes all endpoints (which must be strings for this factory) with + another string. This can be useful for sub applications:: + + url_map = Map([ + Rule('/', endpoint='index'), + EndpointPrefix('blog/', [Submount('/blog', [ + Rule('/', endpoint='index'), + Rule('/entry/', endpoint='show') + ])]) + ]) + """ + + def __init__(self, prefix: str, rules: t.Iterable[RuleFactory]) -> None: + self.prefix = prefix + self.rules = rules + + def get_rules(self, map: "Map") -> t.Iterator["Rule"]: + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + rule = rule.empty() + rule.endpoint = self.prefix + rule.endpoint + yield rule + + +class RuleTemplate: + """Returns copies of the rules wrapped and expands string templates in + the endpoint, rule, defaults or subdomain sections. + + Here a small example for such a rule template:: + + from werkzeug.routing import Map, Rule, RuleTemplate + + resource = RuleTemplate([ + Rule('/$name/', endpoint='$name.list'), + Rule('/$name/', endpoint='$name.show') + ]) + + url_map = Map([resource(name='user'), resource(name='page')]) + + When a rule template is called the keyword arguments are used to + replace the placeholders in all the string parameters. + """ + + def __init__(self, rules: t.Iterable["Rule"]) -> None: + self.rules = list(rules) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> "RuleTemplateFactory": + return RuleTemplateFactory(self.rules, dict(*args, **kwargs)) + + +class RuleTemplateFactory(RuleFactory): + """A factory that fills in template variables into rules. Used by + `RuleTemplate` internally. 
+ + :internal: + """ + + def __init__( + self, rules: t.Iterable[RuleFactory], context: t.Dict[str, t.Any] + ) -> None: + self.rules = rules + self.context = context + + def get_rules(self, map: "Map") -> t.Iterator["Rule"]: + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + new_defaults = subdomain = None + if rule.defaults: + new_defaults = {} + for key, value in rule.defaults.items(): + if isinstance(value, str): + value = Template(value).substitute(self.context) + new_defaults[key] = value + if rule.subdomain is not None: + subdomain = Template(rule.subdomain).substitute(self.context) + new_endpoint = rule.endpoint + if isinstance(new_endpoint, str): + new_endpoint = Template(new_endpoint).substitute(self.context) + yield Rule( + Template(rule.rule).substitute(self.context), + new_defaults, + subdomain, + rule.methods, + rule.build_only, + new_endpoint, + rule.strict_slashes, + ) + + +def _prefix_names(src: str) -> ast.stmt: + """ast parse and prefix names with `.` to avoid collision with user vars""" + tree = ast.parse(src).body[0] + if isinstance(tree, ast.Expr): + tree = tree.value # type: ignore + for node in ast.walk(tree): + if isinstance(node, ast.Name): + node.id = f".{node.id}" + return tree + + +_CALL_CONVERTER_CODE_FMT = "self._converters[{elem!r}].to_url()" +_IF_KWARGS_URL_ENCODE_CODE = """\ +if kwargs: + q = '?' + params = self._encode_query_vars(kwargs) +else: + q = params = '' +""" +_IF_KWARGS_URL_ENCODE_AST = _prefix_names(_IF_KWARGS_URL_ENCODE_CODE) +_URL_ENCODE_AST_NAMES = (_prefix_names("q"), _prefix_names("params")) + + +class Rule(RuleFactory): + """A Rule represents one URL pattern. There are some options for `Rule` + that change the way it behaves and are passed to the `Rule` constructor. + Note that besides the rule-string all arguments *must* be keyword arguments + in order to not break the application on Werkzeug upgrades. + + `string` + Rule strings basically are just normal URL paths with placeholders in + the format ```` where the converter and the + arguments are optional. If no converter is defined the `default` + converter is used which means `string` in the normal configuration. + + URL rules that end with a slash are branch URLs, others are leaves. + If you have `strict_slashes` enabled (which is the default), all + branch URLs that are matched without a trailing slash will trigger a + redirect to the same URL with the missing slash appended. + + The converters are defined on the `Map`. + + `endpoint` + The endpoint for this rule. This can be anything. A reference to a + function, a string, a number etc. The preferred way is using a string + because the endpoint is used for URL generation. + + `defaults` + An optional dict with defaults for other rules with the same endpoint. + This is a bit tricky but useful if you want to have unique URLs:: + + url_map = Map([ + Rule('/all/', defaults={'page': 1}, endpoint='all_entries'), + Rule('/all/page/', endpoint='all_entries') + ]) + + If a user now visits ``http://example.com/all/page/1`` he will be + redirected to ``http://example.com/all/``. If `redirect_defaults` is + disabled on the `Map` instance this will only affect the URL + generation. + + `subdomain` + The subdomain rule string for this rule. If not specified the rule + only matches for the `default_subdomain` of the map. If the map is + not bound to a subdomain this feature is disabled. 
+ + Can be useful if you want to have user profiles on different subdomains + and all subdomains are forwarded to your application:: + + url_map = Map([ + Rule('/', subdomain='', endpoint='user/homepage'), + Rule('/stats', subdomain='', endpoint='user/stats') + ]) + + `methods` + A sequence of http methods this rule applies to. If not specified, all + methods are allowed. For example this can be useful if you want different + endpoints for `POST` and `GET`. If methods are defined and the path + matches but the method matched against is not in this list or in the + list of another rule for that path the error raised is of the type + `MethodNotAllowed` rather than `NotFound`. If `GET` is present in the + list of methods and `HEAD` is not, `HEAD` is added automatically. + + `strict_slashes` + Override the `Map` setting for `strict_slashes` only for this rule. If + not specified the `Map` setting is used. + + `merge_slashes` + Override :attr:`Map.merge_slashes` for this rule. + + `build_only` + Set this to True and the rule will never match but will create a URL + that can be build. This is useful if you have resources on a subdomain + or folder that are not handled by the WSGI application (like static data) + + `redirect_to` + If given this must be either a string or callable. In case of a + callable it's called with the url adapter that triggered the match and + the values of the URL as keyword arguments and has to return the target + for the redirect, otherwise it has to be a string with placeholders in + rule syntax:: + + def foo_with_slug(adapter, id): + # ask the database for the slug for the old id. this of + # course has nothing to do with werkzeug. + return f'foo/{Foo.get_slug_for_id(id)}' + + url_map = Map([ + Rule('/foo/', endpoint='foo'), + Rule('/some/old/url/', redirect_to='foo/'), + Rule('/other/old/url/', redirect_to=foo_with_slug) + ]) + + When the rule is matched the routing system will raise a + `RequestRedirect` exception with the target for the redirect. + + Keep in mind that the URL will be joined against the URL root of the + script so don't use a leading slash on the target URL unless you + really mean root of that domain. + + `alias` + If enabled this rule serves as an alias for another rule with the same + endpoint and arguments. + + `host` + If provided and the URL map has host matching enabled this can be + used to provide a match rule for the whole host. This also means + that the subdomain feature is disabled. + + `websocket` + If ``True``, this rule is only matches for WebSocket (``ws://``, + ``wss://``) requests. By default, rules will only match for HTTP + requests. + + .. versionadded:: 1.0 + Added ``websocket``. + + .. versionadded:: 1.0 + Added ``merge_slashes``. + + .. versionadded:: 0.7 + Added ``alias`` and ``host``. + + .. versionchanged:: 0.6.1 + ``HEAD`` is added to ``methods`` if ``GET`` is present. 
+ """ + + def __init__( + self, + string: str, + defaults: t.Optional[t.Mapping[str, t.Any]] = None, + subdomain: t.Optional[str] = None, + methods: t.Optional[t.Iterable[str]] = None, + build_only: bool = False, + endpoint: t.Optional[str] = None, + strict_slashes: t.Optional[bool] = None, + merge_slashes: t.Optional[bool] = None, + redirect_to: t.Optional[t.Union[str, t.Callable[..., str]]] = None, + alias: bool = False, + host: t.Optional[str] = None, + websocket: bool = False, + ) -> None: + if not string.startswith("/"): + raise ValueError("urls must start with a leading slash") + self.rule = string + self.is_leaf = not string.endswith("/") + + self.map: "Map" = None # type: ignore + self.strict_slashes = strict_slashes + self.merge_slashes = merge_slashes + self.subdomain = subdomain + self.host = host + self.defaults = defaults + self.build_only = build_only + self.alias = alias + self.websocket = websocket + + if methods is not None: + if isinstance(methods, str): + raise TypeError("'methods' should be a list of strings.") + + methods = {x.upper() for x in methods} + + if "HEAD" not in methods and "GET" in methods: + methods.add("HEAD") + + if websocket and methods - {"GET", "HEAD", "OPTIONS"}: + raise ValueError( + "WebSocket rules can only use 'GET', 'HEAD', and 'OPTIONS' methods." + ) + + self.methods = methods + self.endpoint: str = endpoint # type: ignore + self.redirect_to = redirect_to + + if defaults: + self.arguments = set(map(str, defaults)) + else: + self.arguments = set() + + self._trace: t.List[t.Tuple[bool, str]] = [] + + def empty(self) -> "Rule": + """ + Return an unbound copy of this rule. + + This can be useful if want to reuse an already bound URL for another + map. See ``get_empty_kwargs`` to override what keyword arguments are + provided to the new copy. + """ + return type(self)(self.rule, **self.get_empty_kwargs()) + + def get_empty_kwargs(self) -> t.Mapping[str, t.Any]: + """ + Provides kwargs for instantiating empty copy with empty() + + Use this method to provide custom keyword arguments to the subclass of + ``Rule`` when calling ``some_rule.empty()``. Helpful when the subclass + has custom keyword arguments that are needed at instantiation. + + Must return a ``dict`` that will be provided as kwargs to the new + instance of ``Rule``, following the initial ``self.rule`` value which + is always provided as the first, required positional argument. + """ + defaults = None + if self.defaults: + defaults = dict(self.defaults) + return dict( + defaults=defaults, + subdomain=self.subdomain, + methods=self.methods, + build_only=self.build_only, + endpoint=self.endpoint, + strict_slashes=self.strict_slashes, + redirect_to=self.redirect_to, + alias=self.alias, + host=self.host, + ) + + def get_rules(self, map: "Map") -> t.Iterator["Rule"]: + yield self + + def refresh(self) -> None: + """Rebinds and refreshes the URL. Call this if you modified the + rule in place. + + :internal: + """ + self.bind(self.map, rebind=True) + + def bind(self, map: "Map", rebind: bool = False) -> None: + """Bind the url to a map and create a regular expression based on + the information from the rule itself and the defaults from the map. 
+
+        :internal:
+        """
+        if self.map is not None and not rebind:
+            raise RuntimeError(f"url rule {self!r} already bound to map {self.map!r}")
+        self.map = map
+        if self.strict_slashes is None:
+            self.strict_slashes = map.strict_slashes
+        if self.merge_slashes is None:
+            self.merge_slashes = map.merge_slashes
+        if self.subdomain is None:
+            self.subdomain = map.default_subdomain
+        self.compile()
+
+    def get_converter(
+        self,
+        variable_name: str,
+        converter_name: str,
+        args: t.Tuple,
+        kwargs: t.Mapping[str, t.Any],
+    ) -> "BaseConverter":
+        """Looks up the converter for the given parameter.
+
+        .. versionadded:: 0.9
+        """
+        if converter_name not in self.map.converters:
+            raise LookupError(f"the converter {converter_name!r} does not exist")
+        return self.map.converters[converter_name](self.map, *args, **kwargs)
+
+    def _encode_query_vars(self, query_vars: t.Mapping[str, t.Any]) -> str:
+        return url_encode(
+            query_vars,
+            charset=self.map.charset,
+            sort=self.map.sort_parameters,
+            key=self.map.sort_key,
+        )
+
+    def compile(self) -> None:
+        """Compiles the regular expression and stores it."""
+        assert self.map is not None, "rule not bound"
+
+        if self.map.host_matching:
+            domain_rule = self.host or ""
+        else:
+            domain_rule = self.subdomain or ""
+
+        self._trace = []
+        self._converters: t.Dict[str, "BaseConverter"] = {}
+        self._static_weights: t.List[t.Tuple[int, int]] = []
+        self._argument_weights: t.List[int] = []
+        regex_parts = []
+
+        def _build_regex(rule: str) -> None:
+            index = 0
+            for converter, arguments, variable in parse_rule(rule):
+                if converter is None:
+                    for match in re.finditer(r"/+|[^/]+", variable):
+                        part = match.group(0)
+                        if part.startswith("/"):
+                            if self.merge_slashes:
+                                regex_parts.append(r"/+?")
+                                self._trace.append((False, "/"))
+                            else:
+                                regex_parts.append(part)
+                                self._trace.append((False, part))
+                            continue
+                        self._trace.append((False, part))
+                        regex_parts.append(re.escape(part))
+                        if part:
+                            self._static_weights.append((index, -len(part)))
+                else:
+                    if arguments:
+                        c_args, c_kwargs = parse_converter_args(arguments)
+                    else:
+                        c_args = ()
+                        c_kwargs = {}
+                    convobj = self.get_converter(variable, converter, c_args, c_kwargs)
+                    regex_parts.append(f"(?P<{variable}>{convobj.regex})")
+                    self._converters[variable] = convobj
+                    self._trace.append((True, variable))
+                    self._argument_weights.append(convobj.weight)
+                    self.arguments.add(str(variable))
+                index = index + 1
+
+        _build_regex(domain_rule)
+        regex_parts.append("\\|")
+        self._trace.append((False, "|"))
+        _build_regex(self.rule if self.is_leaf else self.rule.rstrip("/"))
+        if not self.is_leaf:
+            self._trace.append((False, "/"))
+
+        self._build: t.Callable[..., t.Tuple[str, str]]
+        self._build = self._compile_builder(False).__get__(self, None)  # type: ignore
+        self._build_unknown: t.Callable[..., t.Tuple[str, str]]
+        self._build_unknown = self._compile_builder(True).__get__(  # type: ignore
+            self, None
+        )
+
+        if self.build_only:
+            return
+
+        if not (self.is_leaf and self.strict_slashes):
+            reps = "*" if self.merge_slashes else "?"
+            tail = f"(?<!/)(?P<__suffix__>/{reps})"
+        else:
+            tail = ""
+
+        regex = f"^{''.join(regex_parts)}{tail}$"
+        self._regex = re.compile(regex)
+
+    def match(
+        self, path: str, method: t.Optional[str] = None
+    ) -> t.Optional[t.MutableMapping[str, t.Any]]:
+        """Check if the rule matches a given path. Path is a string in the
+        form ``"subdomain|/path"`` and is assembled by the map. If
+        the map is doing host matching the subdomain part will be the host
+        instead.
+ + If the rule matches a dict with the converted values is returned, + otherwise the return value is `None`. + + :internal: + """ + if not self.build_only: + require_redirect = False + + m = self._regex.search(path) + if m is not None: + groups = m.groupdict() + # we have a folder like part of the url without a trailing + # slash and strict slashes enabled. raise an exception that + # tells the map to redirect to the same url but with a + # trailing slash + if ( + self.strict_slashes + and not self.is_leaf + and not groups.pop("__suffix__") + and ( + method is None or self.methods is None or method in self.methods + ) + ): + path += "/" + require_redirect = True + # if we are not in strict slashes mode we have to remove + # a __suffix__ + elif not self.strict_slashes: + del groups["__suffix__"] + + result = {} + for name, value in groups.items(): + try: + value = self._converters[name].to_python(value) + except ValidationError: + return None + result[str(name)] = value + if self.defaults: + result.update(self.defaults) + + if self.merge_slashes: + new_path = "|".join(self.build(result, False)) # type: ignore + if path.endswith("/") and not new_path.endswith("/"): + new_path += "/" + if new_path.count("/") < path.count("/"): + # The URL will be encoded when MapAdapter.match + # handles the RequestPath raised below. Decode + # the URL here to avoid a double encoding. + path = url_unquote(new_path) + require_redirect = True + + if require_redirect: + path = path.split("|", 1)[1] + raise RequestPath(path) + + if self.alias and self.map.redirect_defaults: + raise RequestAliasRedirect(result) + + return result + + return None + + @staticmethod + def _get_func_code(code: CodeType, name: str) -> t.Callable[..., t.Tuple[str, str]]: + globs: t.Dict[str, t.Any] = {} + locs: t.Dict[str, t.Any] = {} + exec(code, globs, locs) + return locs[name] # type: ignore + + def _compile_builder( + self, append_unknown: bool = True + ) -> t.Callable[..., t.Tuple[str, str]]: + defaults = self.defaults or {} + dom_ops: t.List[t.Tuple[bool, str]] = [] + url_ops: t.List[t.Tuple[bool, str]] = [] + + opl = dom_ops + for is_dynamic, data in self._trace: + if data == "|" and opl is dom_ops: + opl = url_ops + continue + # this seems like a silly case to ever come up but: + # if a default is given for a value that appears in the rule, + # resolve it to a constant ahead of time + if is_dynamic and data in defaults: + data = self._converters[data].to_url(defaults[data]) + opl.append((False, data)) + elif not is_dynamic: + opl.append( + (False, url_quote(_to_bytes(data, self.map.charset), safe="/:|+")) + ) + else: + opl.append((True, data)) + + def _convert(elem: str) -> ast.stmt: + ret = _prefix_names(_CALL_CONVERTER_CODE_FMT.format(elem=elem)) + ret.args = [ast.Name(str(elem), ast.Load())] # type: ignore # str for py2 + return ret + + def _parts(ops: t.List[t.Tuple[bool, str]]) -> t.List[ast.AST]: + parts = [ + _convert(elem) if is_dynamic else ast.Str(s=elem) + for is_dynamic, elem in ops + ] + parts = parts or [ast.Str("")] + # constant fold + ret = [parts[0]] + for p in parts[1:]: + if isinstance(p, ast.Str) and isinstance(ret[-1], ast.Str): + ret[-1] = ast.Str(ret[-1].s + p.s) + else: + ret.append(p) + return ret + + dom_parts = _parts(dom_ops) + url_parts = _parts(url_ops) + if not append_unknown: + body = [] + else: + body = [_IF_KWARGS_URL_ENCODE_AST] + url_parts.extend(_URL_ENCODE_AST_NAMES) + + def _join(parts: t.List[ast.AST]) -> ast.AST: + if len(parts) == 1: # shortcut + return parts[0] + return 
ast.JoinedStr(parts)
+
+        body.append(
+            ast.Return(ast.Tuple([_join(dom_parts), _join(url_parts)], ast.Load()))
+        )
+
+        pargs = [
+            elem
+            for is_dynamic, elem in dom_ops + url_ops
+            if is_dynamic and elem not in defaults
+        ]
+        kargs = [str(k) for k in defaults]
+
+        func_ast: ast.FunctionDef = _prefix_names("def _(): pass")  # type: ignore
+        func_ast.name = f"<builder:{self.rule!r}>"
+        func_ast.args.args.append(ast.arg(".self", None))
+        for arg in pargs + kargs:
+            func_ast.args.args.append(ast.arg(arg, None))
+        func_ast.args.kwarg = ast.arg(".kwargs", None)
+        for _ in kargs:
+            func_ast.args.defaults.append(ast.Str(""))
+        func_ast.body = body
+
+        # use `ast.parse` instead of `ast.Module` for better portability
+        # Python 3.8 changes the signature of `ast.Module`
+        module = ast.parse("")
+        module.body = [func_ast]
+
+        # mark everything as on line 1, offset 0
+        # less error-prone than `ast.fix_missing_locations`
+        # bad line numbers cause an assert to fail in debug builds
+        for node in ast.walk(module):
+            if "lineno" in node._attributes:
+                node.lineno = 1
+            if "col_offset" in node._attributes:
+                node.col_offset = 0
+
+        code = compile(module, "<werkzeug routing>", "exec")
+        return self._get_func_code(code, func_ast.name)
+
+    def build(
+        self, values: t.Mapping[str, t.Any], append_unknown: bool = True
+    ) -> t.Optional[t.Tuple[str, str]]:
+        """Assembles the relative url for that rule and the subdomain.
+        If building doesn't work for some reasons `None` is returned.
+
+        :internal:
+        """
+        try:
+            if append_unknown:
+                return self._build_unknown(**values)
+            else:
+                return self._build(**values)
+        except ValidationError:
+            return None
+
+    def provides_defaults_for(self, rule: "Rule") -> bool:
+        """Check if this rule has defaults for a given rule.
+
+        :internal:
+        """
+        return bool(
+            not self.build_only
+            and self.defaults
+            and self.endpoint == rule.endpoint
+            and self != rule
+            and self.arguments == rule.arguments
+        )
+
+    def suitable_for(
+        self, values: t.Mapping[str, t.Any], method: t.Optional[str] = None
+    ) -> bool:
+        """Check if the dict of values has enough data for url generation.
+
+        :internal:
+        """
+        # if a method was given explicitly and that method is not supported
+        # by this rule, this rule is not suitable.
+        if (
+            method is not None
+            and self.methods is not None
+            and method not in self.methods
+        ):
+            return False
+
+        defaults = self.defaults or ()
+
+        # all arguments required must be either in the defaults dict or
+        # the value dictionary otherwise it's not suitable
+        for key in self.arguments:
+            if key not in defaults and key not in values:
+                return False
+
+        # in case defaults are given we ensure that either the value was
+        # skipped or the value is the same as the default value.
+        if defaults:
+            for key, value in defaults.items():
+                if key in values and value != values[key]:
+                    return False
+
+        return True
+
+    def match_compare_key(
+        self,
+    ) -> t.Tuple[bool, int, t.Iterable[t.Tuple[int, int]], int, t.Iterable[int]]:
+        """The match compare key for sorting.
+
+        Current implementation:
+
+        1. rules without any arguments come first for performance
+           reasons only as we expect them to match faster and some
+           common ones usually don't have any arguments (index pages etc.)
+        2. rules with more static parts come first so the second argument
+           is the negative length of the number of the static weights.
+        3. we order by static weights, which is a combination of index
+           and length
+        4. The more complex rules come first so the next argument is the
+           negative length of the number of argument weights.
+        5. lastly we order by the actual argument weights.
+
+        :internal:
+        """
+        return (
+            bool(self.arguments),
+            -len(self._static_weights),
+            self._static_weights,
+            -len(self._argument_weights),
+            self._argument_weights,
+        )
+
+    def build_compare_key(self) -> t.Tuple[int, int, int]:
+        """The build compare key for sorting.
+
+        :internal:
+        """
+        return (1 if self.alias else 0, -len(self.arguments), -len(self.defaults or ()))
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, type(self)) and self._trace == other._trace
+
+    __hash__ = None  # type: ignore
+
+    def __str__(self) -> str:
+        return self.rule
+
+    def __repr__(self) -> str:
+        if self.map is None:
+            return f"<{type(self).__name__} (unbound)>"
+        parts = []
+        for is_dynamic, data in self._trace:
+            if is_dynamic:
+                parts.append(f"<{data}>")
+            else:
+                parts.append(data)
+        parts = "".join(parts).lstrip("|")
+        methods = f" ({', '.join(self.methods)})" if self.methods is not None else ""
+        return f"<{type(self).__name__} {parts!r}{methods} -> {self.endpoint}>"
+
+
+class BaseConverter:
+    """Base class for all converters."""
+
+    regex = "[^/]+"
+    weight = 100
+
+    def __init__(self, map: "Map", *args: t.Any, **kwargs: t.Any) -> None:
+        self.map = map
+
+    def to_python(self, value: str) -> t.Any:
+        return value
+
+    def to_url(self, value: t.Any) -> str:
+        if isinstance(value, (bytes, bytearray)):
+            return _fast_url_quote(value)
+        return _fast_url_quote(str(value).encode(self.map.charset))
+
+
+class UnicodeConverter(BaseConverter):
+    """This converter is the default converter and accepts any string but
+    only one path segment. Thus the string can not include a slash.
+
+    This is the default validator.
+
+    Example::
+
+        Rule('/pages/<page>'),
+        Rule('/<string(length=2):lang_code>')
+
+    :param map: the :class:`Map`.
+    :param minlength: the minimum length of the string. Must be greater
+                      or equal 1.
+    :param maxlength: the maximum length of the string.
+    :param length: the exact length of the string.
+    """
+
+    def __init__(
+        self,
+        map: "Map",
+        minlength: int = 1,
+        maxlength: t.Optional[int] = None,
+        length: t.Optional[int] = None,
+    ) -> None:
+        super().__init__(map)
+        if length is not None:
+            length_regex = f"{{{int(length)}}}"
+        else:
+            if maxlength is None:
+                maxlength_value = ""
+            else:
+                maxlength_value = str(int(maxlength))
+            length_regex = f"{{{int(minlength)},{maxlength_value}}}"
+        self.regex = f"[^/]{length_regex}"
+
+
+class AnyConverter(BaseConverter):
+    """Matches one of the items provided. Items can either be Python
+    identifiers or strings::
+
+        Rule('/<any(about, help, imprint, class, "foo,bar"):page_name>')
+
+    :param map: the :class:`Map`.
+    :param items: this function accepts the possible items as positional
+                  arguments.
+    """
+
+    def __init__(self, map: "Map", *items: str) -> None:
+        super().__init__(map)
+        self.regex = f"(?:{'|'.join([re.escape(x) for x in items])})"
+
+
+class PathConverter(BaseConverter):
+    """Like the default :class:`UnicodeConverter`, but it also matches
+    slashes. This is useful for wikis and similar applications::
+
+        Rule('/<path:wikipage>')
+        Rule('/<path:wikipage>/edit')
+
+    :param map: the :class:`Map`.
+    """
+
+    regex = "[^/].*?"
+    weight = 200
+
+
+class NumberConverter(BaseConverter):
+    """Baseclass for `IntegerConverter` and `FloatConverter`.
+
+    :internal:
+    """
+
+    weight = 50
+    num_convert: t.Callable = int
+
+    def __init__(
+        self,
+        map: "Map",
+        fixed_digits: int = 0,
+        min: t.Optional[int] = None,
+        max: t.Optional[int] = None,
+        signed: bool = False,
+    ) -> None:
+        if signed:
+            self.regex = self.signed_regex
+        super().__init__(map)
+        self.fixed_digits = fixed_digits
+        self.min = min
+        self.max = max
+        self.signed = signed
+
+    def to_python(self, value: str) -> t.Any:
+        if self.fixed_digits and len(value) != self.fixed_digits:
+            raise ValidationError()
+        value = self.num_convert(value)
+        if (self.min is not None and value < self.min) or (
+            self.max is not None and value > self.max
+        ):
+            raise ValidationError()
+        return value
+
+    def to_url(self, value: t.Any) -> str:
+        value = str(self.num_convert(value))
+        if self.fixed_digits:
+            value = value.zfill(self.fixed_digits)
+        return value
+
+    @property
+    def signed_regex(self) -> str:
+        return f"-?{self.regex}"
+
+
+class IntegerConverter(NumberConverter):
+    """This converter only accepts integer values::
+
+        Rule("/page/<int:page>")
+
+    By default it only accepts unsigned, positive values. The ``signed``
+    parameter will enable signed, negative values. ::
+
+        Rule("/page/<int(signed=True):page>")
+
+    :param map: The :class:`Map`.
+    :param fixed_digits: The number of fixed digits in the URL. If you
+        set this to ``4`` for example, the rule will only match if the
+        URL looks like ``/0001/``. The default is variable length.
+    :param min: The minimal value.
+    :param max: The maximal value.
+    :param signed: Allow signed (negative) values.
+
+    .. versionadded:: 0.15
+        The ``signed`` parameter.
+    """
+
+    regex = r"\d+"
+
+
+class FloatConverter(NumberConverter):
+    """This converter only accepts floating point values::
+
+        Rule("/probability/<float:probability>")
+
+    By default it only accepts unsigned, positive values. The ``signed``
+    parameter will enable signed, negative values. ::
+
+        Rule("/offset/<float(signed=True):offset>")
+
+    :param map: The :class:`Map`.
+    :param min: The minimal value.
+    :param max: The maximal value.
+    :param signed: Allow signed (negative) values.
+
+    .. versionadded:: 0.15
+        The ``signed`` parameter.
+    """
+
+    regex = r"\d+\.\d+"
+    num_convert = float
+
+    def __init__(
+        self,
+        map: "Map",
+        min: t.Optional[float] = None,
+        max: t.Optional[float] = None,
+        signed: bool = False,
+    ) -> None:
+        super().__init__(map, min=min, max=max, signed=signed)  # type: ignore
+
+
+class UUIDConverter(BaseConverter):
+    """This converter only accepts UUID strings::
+
+        Rule('/object/<uuid:identifier>')
+
+    .. versionadded:: 0.10
+
+    :param map: the :class:`Map`.
+    """
+
+    regex = (
+        r"[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-"
+        r"[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}"
+    )
+
+    def to_python(self, value: str) -> uuid.UUID:
+        return uuid.UUID(value)
+
+    def to_url(self, value: uuid.UUID) -> str:
+        return str(value)
+
+
+#: the default converter mapping for the map.
+DEFAULT_CONVERTERS: t.Mapping[str, t.Type[BaseConverter]] = {
+    "default": UnicodeConverter,
+    "string": UnicodeConverter,
+    "any": AnyConverter,
+    "path": PathConverter,
+    "int": IntegerConverter,
+    "float": FloatConverter,
+    "uuid": UUIDConverter,
+}
+
+
+class Map:
+    """The map class stores all the URL rules and some configuration
+    parameters. Some of the configuration values are only stored on the
+    `Map` instance since those affect all rules, others are just defaults
+    and can be overridden for each rule. Note that you have to specify all
+    arguments besides the `rules` as keyword arguments!
+
+    :param rules: sequence of url rules for this map.
+ :param default_subdomain: The default subdomain for rules without a + subdomain defined. + :param charset: charset of the url. defaults to ``"utf-8"`` + :param strict_slashes: If a rule ends with a slash but the matched + URL does not, redirect to the URL with a trailing slash. + :param merge_slashes: Merge consecutive slashes when matching or + building URLs. Matches will redirect to the normalized URL. + Slashes in variable parts are not merged. + :param redirect_defaults: This will redirect to the default rule if it + wasn't visited that way. This helps creating + unique URLs. + :param converters: A dict of converters that adds additional converters + to the list of converters. If you redefine one + converter this will override the original one. + :param sort_parameters: If set to `True` the url parameters are sorted. + See `url_encode` for more details. + :param sort_key: The sort key function for `url_encode`. + :param encoding_errors: the error method to use for decoding + :param host_matching: if set to `True` it enables the host matching + feature and disables the subdomain one. If + enabled the `host` parameter to rules is used + instead of the `subdomain` one. + + .. versionchanged:: 1.0 + If ``url_scheme`` is ``ws`` or ``wss``, only WebSocket rules + will match. + + .. versionchanged:: 1.0 + Added ``merge_slashes``. + + .. versionchanged:: 0.7 + Added ``encoding_errors`` and ``host_matching``. + + .. versionchanged:: 0.5 + Added ``sort_parameters`` and ``sort_key``. + """ + + #: A dict of default converters to be used. + default_converters = ImmutableDict(DEFAULT_CONVERTERS) + + #: The type of lock to use when updating. + #: + #: .. versionadded:: 1.0 + lock_class = Lock + + def __init__( + self, + rules: t.Optional[t.Iterable[RuleFactory]] = None, + default_subdomain: str = "", + charset: str = "utf-8", + strict_slashes: bool = True, + merge_slashes: bool = True, + redirect_defaults: bool = True, + converters: t.Optional[t.Mapping[str, t.Type[BaseConverter]]] = None, + sort_parameters: bool = False, + sort_key: t.Optional[t.Callable[[t.Any], t.Any]] = None, + encoding_errors: str = "replace", + host_matching: bool = False, + ) -> None: + self._rules: t.List[Rule] = [] + self._rules_by_endpoint: t.Dict[str, t.List[Rule]] = {} + self._remap = True + self._remap_lock = self.lock_class() + + self.default_subdomain = default_subdomain + self.charset = charset + self.encoding_errors = encoding_errors + self.strict_slashes = strict_slashes + self.merge_slashes = merge_slashes + self.redirect_defaults = redirect_defaults + self.host_matching = host_matching + + self.converters = self.default_converters.copy() + if converters: + self.converters.update(converters) + + self.sort_parameters = sort_parameters + self.sort_key = sort_key + + for rulefactory in rules or (): + self.add(rulefactory) + + def is_endpoint_expecting(self, endpoint: str, *arguments: str) -> bool: + """Iterate over all rules and check if the endpoint expects + the arguments provided. This is for example useful if you have + some URLs that expect a language code and others that do not and + you want to wrap the builder a bit so that the current language + code is automatically added if not provided but endpoints expect + it. + + :param endpoint: the endpoint to check. + :param arguments: this function accepts one or more arguments + as positional arguments. Each one of them is + checked. 
+ """ + self.update() + arguments = set(arguments) + for rule in self._rules_by_endpoint[endpoint]: + if arguments.issubset(rule.arguments): + return True + return False + + def iter_rules(self, endpoint: t.Optional[str] = None) -> t.Iterator[Rule]: + """Iterate over all rules or the rules of an endpoint. + + :param endpoint: if provided only the rules for that endpoint + are returned. + :return: an iterator + """ + self.update() + if endpoint is not None: + return iter(self._rules_by_endpoint[endpoint]) + return iter(self._rules) + + def add(self, rulefactory: RuleFactory) -> None: + """Add a new rule or factory to the map and bind it. Requires that the + rule is not bound to another map. + + :param rulefactory: a :class:`Rule` or :class:`RuleFactory` + """ + for rule in rulefactory.get_rules(self): + rule.bind(self) + self._rules.append(rule) + self._rules_by_endpoint.setdefault(rule.endpoint, []).append(rule) + self._remap = True + + def bind( + self, + server_name: str, + script_name: t.Optional[str] = None, + subdomain: t.Optional[str] = None, + url_scheme: str = "http", + default_method: str = "GET", + path_info: t.Optional[str] = None, + query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, + ) -> "MapAdapter": + """Return a new :class:`MapAdapter` with the details specified to the + call. Note that `script_name` will default to ``'/'`` if not further + specified or `None`. The `server_name` at least is a requirement + because the HTTP RFC requires absolute URLs for redirects and so all + redirect exceptions raised by Werkzeug will contain the full canonical + URL. + + If no path_info is passed to :meth:`match` it will use the default path + info passed to bind. While this doesn't really make sense for + manual bind calls, it's useful if you bind a map to a WSGI + environment which already contains the path info. + + `subdomain` will default to the `default_subdomain` for this map if + no defined. If there is no `default_subdomain` you cannot use the + subdomain feature. + + .. versionchanged:: 1.0 + If ``url_scheme`` is ``ws`` or ``wss``, only WebSocket rules + will match. + + .. versionchanged:: 0.15 + ``path_info`` defaults to ``'/'`` if ``None``. + + .. versionchanged:: 0.8 + ``query_args`` can be a string. + + .. versionchanged:: 0.7 + Added ``query_args``. + """ + server_name = server_name.lower() + if self.host_matching: + if subdomain is not None: + raise RuntimeError("host matching enabled and a subdomain was provided") + elif subdomain is None: + subdomain = self.default_subdomain + if script_name is None: + script_name = "/" + if path_info is None: + path_info = "/" + + try: + server_name = _encode_idna(server_name) # type: ignore + except UnicodeError as e: + raise BadHost() from e + + return MapAdapter( + self, + server_name, + script_name, + subdomain, + url_scheme, + path_info, + default_method, + query_args, + ) + + def bind_to_environ( + self, + environ: "WSGIEnvironment", + server_name: t.Optional[str] = None, + subdomain: t.Optional[str] = None, + ) -> "MapAdapter": + """Like :meth:`bind` but you can pass it an WSGI environment and it + will fetch the information from that dictionary. Note that because of + limitations in the protocol there is no way to get the current + subdomain and real `server_name` from the environment. If you don't + provide it, Werkzeug will use `SERVER_NAME` and `SERVER_PORT` (or + `HTTP_HOST` if provided) as used `server_name` with disabled subdomain + feature. 
+ + If `subdomain` is `None` but an environment and a server name is + provided it will calculate the current subdomain automatically. + Example: `server_name` is ``'example.com'`` and the `SERVER_NAME` + in the wsgi `environ` is ``'staging.dev.example.com'`` the calculated + subdomain will be ``'staging.dev'``. + + If the object passed as environ has an environ attribute, the value of + this attribute is used instead. This allows you to pass request + objects. Additionally `PATH_INFO` added as a default of the + :class:`MapAdapter` so that you don't have to pass the path info to + the match method. + + .. versionchanged:: 1.0.0 + If the passed server name specifies port 443, it will match + if the incoming scheme is ``https`` without a port. + + .. versionchanged:: 1.0.0 + A warning is shown when the passed server name does not + match the incoming WSGI server name. + + .. versionchanged:: 0.8 + This will no longer raise a ValueError when an unexpected server + name was passed. + + .. versionchanged:: 0.5 + previously this method accepted a bogus `calculate_subdomain` + parameter that did not have any effect. It was removed because + of that. + + :param environ: a WSGI environment. + :param server_name: an optional server name hint (see above). + :param subdomain: optionally the current subdomain (see above). + """ + environ = _get_environ(environ) + wsgi_server_name = get_host(environ).lower() + scheme = environ["wsgi.url_scheme"] + upgrade = any( + v.strip() == "upgrade" + for v in environ.get("HTTP_CONNECTION", "").lower().split(",") + ) + + if upgrade and environ.get("HTTP_UPGRADE", "").lower() == "websocket": + scheme = "wss" if scheme == "https" else "ws" + + if server_name is None: + server_name = wsgi_server_name + else: + server_name = server_name.lower() + + # strip standard port to match get_host() + if scheme in {"http", "ws"} and server_name.endswith(":80"): + server_name = server_name[:-3] + elif scheme in {"https", "wss"} and server_name.endswith(":443"): + server_name = server_name[:-4] + + if subdomain is None and not self.host_matching: + cur_server_name = wsgi_server_name.split(".") + real_server_name = server_name.split(".") + offset = -len(real_server_name) + + if cur_server_name[offset:] != real_server_name: + # This can happen even with valid configs if the server was + # accessed directly by IP address under some situations. + # Instead of raising an exception like in Werkzeug 0.7 or + # earlier we go by an invalid subdomain which will result + # in a 404 error on matching. + warnings.warn( + f"Current server name {wsgi_server_name!r} doesn't match configured" + f" server name {server_name!r}", + stacklevel=2, + ) + subdomain = "" + else: + subdomain = ".".join(filter(None, cur_server_name[:offset])) + + def _get_wsgi_string(name: str) -> t.Optional[str]: + val = environ.get(name) + if val is not None: + return _wsgi_decoding_dance(val, self.charset) + return None + + script_name = _get_wsgi_string("SCRIPT_NAME") + path_info = _get_wsgi_string("PATH_INFO") + query_args = _get_wsgi_string("QUERY_STRING") + return Map.bind( + self, + server_name, + script_name, + subdomain, + scheme, + environ["REQUEST_METHOD"], + path_info, + query_args=query_args, + ) + + def update(self) -> None: + """Called before matching and building to keep the compiled rules + in the correct order after things changed. 
+ """ + if not self._remap: + return + + with self._remap_lock: + if not self._remap: + return + + self._rules.sort(key=lambda x: x.match_compare_key()) + for rules in self._rules_by_endpoint.values(): + rules.sort(key=lambda x: x.build_compare_key()) + self._remap = False + + def __repr__(self) -> str: + rules = self.iter_rules() + return f"{type(self).__name__}({pformat(list(rules))})" + + +class MapAdapter: + + """Returned by :meth:`Map.bind` or :meth:`Map.bind_to_environ` and does + the URL matching and building based on runtime information. + """ + + def __init__( + self, + map: Map, + server_name: str, + script_name: str, + subdomain: t.Optional[str], + url_scheme: str, + path_info: str, + default_method: str, + query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, + ): + self.map = map + self.server_name = _to_str(server_name) + script_name = _to_str(script_name) + if not script_name.endswith("/"): + script_name += "/" + self.script_name = script_name + self.subdomain = _to_str(subdomain) + self.url_scheme = _to_str(url_scheme) + self.path_info = _to_str(path_info) + self.default_method = _to_str(default_method) + self.query_args = query_args + self.websocket = self.url_scheme in {"ws", "wss"} + + def dispatch( + self, + view_func: t.Callable[[str, t.Mapping[str, t.Any]], "WSGIApplication"], + path_info: t.Optional[str] = None, + method: t.Optional[str] = None, + catch_http_exceptions: bool = False, + ) -> "WSGIApplication": + """Does the complete dispatching process. `view_func` is called with + the endpoint and a dict with the values for the view. It should + look up the view function, call it, and return a response object + or WSGI application. http exceptions are not caught by default + so that applications can display nicer error messages by just + catching them by hand. If you want to stick with the default + error messages you can pass it ``catch_http_exceptions=True`` and + it will catch the http exceptions. + + Here a small example for the dispatch usage:: + + from werkzeug.wrappers import Request, Response + from werkzeug.wsgi import responder + from werkzeug.routing import Map, Rule + + def on_index(request): + return Response('Hello from the index') + + url_map = Map([Rule('/', endpoint='index')]) + views = {'index': on_index} + + @responder + def application(environ, start_response): + request = Request(environ) + urls = url_map.bind_to_environ(environ) + return urls.dispatch(lambda e, v: views[e](request, **v), + catch_http_exceptions=True) + + Keep in mind that this method might return exception objects, too, so + use :class:`Response.force_type` to get a response object. + + :param view_func: a function that is called with the endpoint as + first argument and the value dict as second. Has + to dispatch to the actual view function with this + information. (see above) + :param path_info: the path info to use for matching. Overrides the + path info specified on binding. + :param method: the HTTP method used for matching. Overrides the + method specified on binding. + :param catch_http_exceptions: set to `True` to catch any of the + werkzeug :class:`HTTPException`\\s. 
+ """ + try: + try: + endpoint, args = self.match(path_info, method) + except RequestRedirect as e: + return e + return view_func(endpoint, args) + except HTTPException as e: + if catch_http_exceptions: + return e + raise + + @typing.overload + def match( # type: ignore + self, + path_info: t.Optional[str] = None, + method: t.Optional[str] = None, + return_rule: "te.Literal[False]" = False, + query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, + websocket: t.Optional[bool] = None, + ) -> t.Tuple[str, t.Mapping[str, t.Any]]: + ... + + @typing.overload + def match( + self, + path_info: t.Optional[str] = None, + method: t.Optional[str] = None, + return_rule: "te.Literal[True]" = True, + query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, + websocket: t.Optional[bool] = None, + ) -> t.Tuple[Rule, t.Mapping[str, t.Any]]: + ... + + def match( + self, + path_info: t.Optional[str] = None, + method: t.Optional[str] = None, + return_rule: bool = False, + query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, + websocket: t.Optional[bool] = None, + ) -> t.Tuple[t.Union[str, Rule], t.Mapping[str, t.Any]]: + """The usage is simple: you just pass the match method the current + path info as well as the method (which defaults to `GET`). The + following things can then happen: + + - you receive a `NotFound` exception that indicates that no URL is + matching. A `NotFound` exception is also a WSGI application you + can call to get a default page not found page (happens to be the + same object as `werkzeug.exceptions.NotFound`) + + - you receive a `MethodNotAllowed` exception that indicates that there + is a match for this URL but not for the current request method. + This is useful for RESTful applications. + + - you receive a `RequestRedirect` exception with a `new_url` + attribute. This exception is used to notify you about a request + Werkzeug requests from your WSGI application. This is for example the + case if you request ``/foo`` although the correct URL is ``/foo/`` + You can use the `RequestRedirect` instance as response-like object + similar to all other subclasses of `HTTPException`. + + - you receive a ``WebsocketMismatch`` exception if the only + match is a WebSocket rule but the bind is an HTTP request, or + if the match is an HTTP rule but the bind is a WebSocket + request. + + - you get a tuple in the form ``(endpoint, arguments)`` if there is + a match (unless `return_rule` is True, in which case you get a tuple + in the form ``(rule, arguments)``) + + If the path info is not passed to the match method the default path + info of the map is used (defaults to the root URL if not defined + explicitly). + + All of the exceptions raised are subclasses of `HTTPException` so they + can be used as WSGI responses. They will all render generic error or + redirect pages. + + Here is a small example for matching: + + >>> m = Map([ + ... Rule('/', endpoint='index'), + ... Rule('/downloads/', endpoint='downloads/index'), + ... Rule('/downloads/', endpoint='downloads/show') + ... ]) + >>> urls = m.bind("example.com", "/") + >>> urls.match("/", "GET") + ('index', {}) + >>> urls.match("/downloads/42") + ('downloads/show', {'id': 42}) + + And here is what happens on redirect and missing URLs: + + >>> urls.match("/downloads") + Traceback (most recent call last): + ... + RequestRedirect: http://example.com/downloads/ + >>> urls.match("/missing") + Traceback (most recent call last): + ... 
+ NotFound: 404 Not Found + + :param path_info: the path info to use for matching. Overrides the + path info specified on binding. + :param method: the HTTP method used for matching. Overrides the + method specified on binding. + :param return_rule: return the rule that matched instead of just the + endpoint (defaults to `False`). + :param query_args: optional query arguments that are used for + automatic redirects as string or dictionary. It's + currently not possible to use the query arguments + for URL matching. + :param websocket: Match WebSocket instead of HTTP requests. A + websocket request has a ``ws`` or ``wss`` + :attr:`url_scheme`. This overrides that detection. + + .. versionadded:: 1.0 + Added ``websocket``. + + .. versionchanged:: 0.8 + ``query_args`` can be a string. + + .. versionadded:: 0.7 + Added ``query_args``. + + .. versionadded:: 0.6 + Added ``return_rule``. + """ + self.map.update() + if path_info is None: + path_info = self.path_info + else: + path_info = _to_str(path_info, self.map.charset) + if query_args is None: + query_args = self.query_args or {} + method = (method or self.default_method).upper() + + if websocket is None: + websocket = self.websocket + + require_redirect = False + + domain_part = self.server_name if self.map.host_matching else self.subdomain + path_part = f"/{path_info.lstrip('/')}" if path_info else "" + path = f"{domain_part}|{path_part}" + + have_match_for = set() + websocket_mismatch = False + + for rule in self.map._rules: + try: + rv = rule.match(path, method) + except RequestPath as e: + raise RequestRedirect( + self.make_redirect_url( + url_quote(e.path_info, self.map.charset, safe="/:|+"), + query_args, + ) + ) from None + except RequestAliasRedirect as e: + raise RequestRedirect( + self.make_alias_redirect_url( + path, rule.endpoint, e.matched_values, method, query_args + ) + ) from None + if rv is None: + continue + if rule.methods is not None and method not in rule.methods: + have_match_for.update(rule.methods) + continue + + if rule.websocket != websocket: + websocket_mismatch = True + continue + + if self.map.redirect_defaults: + redirect_url = self.get_default_redirect(rule, method, rv, query_args) + if redirect_url is not None: + raise RequestRedirect(redirect_url) + + if rule.redirect_to is not None: + if isinstance(rule.redirect_to, str): + + def _handle_match(match: t.Match[str]) -> str: + value = rv[match.group(1)] # type: ignore + return rule._converters[match.group(1)].to_url(value) + + redirect_url = _simple_rule_re.sub(_handle_match, rule.redirect_to) + else: + redirect_url = rule.redirect_to(self, **rv) + + if self.subdomain: + netloc = f"{self.subdomain}.{self.server_name}" + else: + netloc = self.server_name + + raise RequestRedirect( + url_join( + f"{self.url_scheme or 'http'}://{netloc}{self.script_name}", + redirect_url, + ) + ) + + if require_redirect: + raise RequestRedirect( + self.make_redirect_url( + url_quote(path_info, self.map.charset, safe="/:|+"), query_args + ) + ) + + if return_rule: + return rule, rv + else: + return rule.endpoint, rv + + if have_match_for: + raise MethodNotAllowed(valid_methods=list(have_match_for)) + + if websocket_mismatch: + raise WebsocketMismatch() + + raise NotFound() + + def test( + self, path_info: t.Optional[str] = None, method: t.Optional[str] = None + ) -> bool: + """Test if a rule would match. Works like `match` but returns `True` + if the URL matches, or `False` if it does not exist. + + :param path_info: the path info to use for matching. 
Overrides the + path info specified on binding. + :param method: the HTTP method used for matching. Overrides the + method specified on binding. + """ + try: + self.match(path_info, method) + except RequestRedirect: + pass + except HTTPException: + return False + return True + + def allowed_methods(self, path_info: t.Optional[str] = None) -> t.Iterable[str]: + """Returns the valid methods that match for a given path. + + .. versionadded:: 0.7 + """ + try: + self.match(path_info, method="--") + except MethodNotAllowed as e: + return e.valid_methods # type: ignore + except HTTPException: + pass + return [] + + def get_host(self, domain_part: t.Optional[str]) -> str: + """Figures out the full host name for the given domain part. The + domain part is a subdomain in case host matching is disabled or + a full host name. + """ + if self.map.host_matching: + if domain_part is None: + return self.server_name + return _to_str(domain_part, "ascii") + subdomain = domain_part + if subdomain is None: + subdomain = self.subdomain + else: + subdomain = _to_str(subdomain, "ascii") + + if subdomain: + return f"{subdomain}.{self.server_name}" + else: + return self.server_name + + def get_default_redirect( + self, + rule: Rule, + method: str, + values: t.MutableMapping[str, t.Any], + query_args: t.Union[t.Mapping[str, t.Any], str], + ) -> t.Optional[str]: + """A helper that returns the URL to redirect to if it finds one. + This is used for default redirecting only. + + :internal: + """ + assert self.map.redirect_defaults + for r in self.map._rules_by_endpoint[rule.endpoint]: + # every rule that comes after this one, including ourself + # has a lower priority for the defaults. We order the ones + # with the highest priority up for building. + if r is rule: + break + if r.provides_defaults_for(rule) and r.suitable_for(values, method): + values.update(r.defaults) # type: ignore + domain_part, path = r.build(values) # type: ignore + return self.make_redirect_url(path, query_args, domain_part=domain_part) + return None + + def encode_query_args(self, query_args: t.Union[t.Mapping[str, t.Any], str]) -> str: + if not isinstance(query_args, str): + return url_encode(query_args, self.map.charset) + return query_args + + def make_redirect_url( + self, + path_info: str, + query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, + domain_part: t.Optional[str] = None, + ) -> str: + """Creates a redirect URL. + + :internal: + """ + if query_args: + suffix = f"?{self.encode_query_args(query_args)}" + else: + suffix = "" + + scheme = self.url_scheme or "http" + host = self.get_host(domain_part) + path = posixpath.join(self.script_name.strip("/"), path_info.lstrip("/")) + return f"{scheme}://{host}/{path}{suffix}" + + def make_alias_redirect_url( + self, + path: str, + endpoint: str, + values: t.Mapping[str, t.Any], + method: str, + query_args: t.Union[t.Mapping[str, t.Any], str], + ) -> str: + """Internally called to make an alias redirect URL.""" + url = self.build( + endpoint, values, method, append_unknown=False, force_external=True + ) + if query_args: + url += f"?{self.encode_query_args(query_args)}" + assert url != path, "detected invalid alias setting. No canonical URL found" + return url + + def _partial_build( + self, + endpoint: str, + values: t.Mapping[str, t.Any], + method: t.Optional[str], + append_unknown: bool, + ) -> t.Optional[t.Tuple[str, str, bool]]: + """Helper for :meth:`build`. Returns subdomain and path for the + rule that accepts this endpoint, values and method. 
+ + :internal: + """ + # in case the method is none, try with the default method first + if method is None: + rv = self._partial_build( + endpoint, values, self.default_method, append_unknown + ) + if rv is not None: + return rv + + # Default method did not match or a specific method is passed. + # Check all for first match with matching host. If no matching + # host is found, go with first result. + first_match = None + + for rule in self.map._rules_by_endpoint.get(endpoint, ()): + if rule.suitable_for(values, method): + build_rv = rule.build(values, append_unknown) + + if build_rv is not None: + rv = (build_rv[0], build_rv[1], rule.websocket) + if self.map.host_matching: + if rv[0] == self.server_name: + return rv + elif first_match is None: + first_match = rv + else: + return rv + + return first_match + + def build( + self, + endpoint: str, + values: t.Optional[t.Mapping[str, t.Any]] = None, + method: t.Optional[str] = None, + force_external: bool = False, + append_unknown: bool = True, + url_scheme: t.Optional[str] = None, + ) -> str: + """Building URLs works pretty much the other way round. Instead of + `match` you call `build` and pass it the endpoint and a dict of + arguments for the placeholders. + + The `build` function also accepts an argument called `force_external` + which, if you set it to `True` will force external URLs. Per default + external URLs (include the server name) will only be used if the + target URL is on a different subdomain. + + >>> m = Map([ + ... Rule('/', endpoint='index'), + ... Rule('/downloads/', endpoint='downloads/index'), + ... Rule('/downloads/', endpoint='downloads/show') + ... ]) + >>> urls = m.bind("example.com", "/") + >>> urls.build("index", {}) + '/' + >>> urls.build("downloads/show", {'id': 42}) + '/downloads/42' + >>> urls.build("downloads/show", {'id': 42}, force_external=True) + 'http://example.com/downloads/42' + + Because URLs cannot contain non ASCII data you will always get + bytes back. Non ASCII characters are urlencoded with the + charset defined on the map instance. + + Additional values are converted to strings and appended to the URL as + URL querystring parameters: + + >>> urls.build("index", {'q': 'My Searchstring'}) + '/?q=My+Searchstring' + + When processing those additional values, lists are furthermore + interpreted as multiple values (as per + :py:class:`werkzeug.datastructures.MultiDict`): + + >>> urls.build("index", {'q': ['a', 'b', 'c']}) + '/?q=a&q=b&q=c' + + Passing a ``MultiDict`` will also add multiple values: + + >>> urls.build("index", MultiDict((('p', 'z'), ('q', 'a'), ('q', 'b')))) + '/?p=z&q=a&q=b' + + If a rule does not exist when building a `BuildError` exception is + raised. + + The build method accepts an argument called `method` which allows you + to specify the method you want to have an URL built for if you have + different methods for the same endpoint specified. + + :param endpoint: the endpoint of the URL to build. + :param values: the values for the URL to build. Unhandled values are + appended to the URL as query parameters. + :param method: the HTTP method for the rule if there are different + URLs for different methods on the same endpoint. + :param force_external: enforce full canonical external URLs. If the URL + scheme is not provided, this will generate + a protocol-relative URL. + :param append_unknown: unknown parameters are appended to the generated + URL as query string argument. Disable this + if you want the builder to ignore those. 
+ :param url_scheme: Scheme to use in place of the bound + :attr:`url_scheme`. + + .. versionchanged:: 2.0 + Added the ``url_scheme`` parameter. + + .. versionadded:: 0.6 + Added the ``append_unknown`` parameter. + """ + self.map.update() + + if values: + temp_values: t.Dict[str, t.Union[t.List[t.Any], t.Any]] = {} + always_list = isinstance(values, MultiDict) + key: str + value: t.Optional[t.Union[t.List[t.Any], t.Any]] + + # For MultiDict, dict.items(values) is like values.lists() + # without the call or list coercion overhead. + for key, value in dict.items(values): # type: ignore + if value is None: + continue + + if always_list or isinstance(value, (list, tuple)): + value = [v for v in value if v is not None] + + if not value: + continue + + if len(value) == 1: + value = value[0] + + temp_values[key] = value + + values = temp_values + else: + values = {} + + rv = self._partial_build(endpoint, values, method, append_unknown) + if rv is None: + raise BuildError(endpoint, values, method, self) + + domain_part, path, websocket = rv + host = self.get_host(domain_part) + + if url_scheme is None: + url_scheme = self.url_scheme + + # Always build WebSocket routes with the scheme (browsers + # require full URLs). If bound to a WebSocket, ensure that HTTP + # routes are built with an HTTP scheme. + secure = url_scheme in {"https", "wss"} + + if websocket: + force_external = True + url_scheme = "wss" if secure else "ws" + elif url_scheme: + url_scheme = "https" if secure else "http" + + # shortcut this. + if not force_external and ( + (self.map.host_matching and host == self.server_name) + or (not self.map.host_matching and domain_part == self.subdomain) + ): + return f"{self.script_name.rstrip('/')}/{path.lstrip('/')}" + + scheme = f"{url_scheme}:" if url_scheme else "" + return f"{scheme}//{host}{self.script_name[:-1]}/{path.lstrip('/')}" diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/__init__.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/multipart.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/multipart.py new file mode 100644 index 0000000..bb8ab34 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/sansio/multipart.py @@ -0,0 +1,260 @@ +import re +from dataclasses import dataclass +from enum import auto +from enum import Enum +from typing import cast +from typing import List +from typing import Optional +from typing import Tuple + +from .._internal import _to_bytes +from .._internal import _to_str +from ..datastructures import Headers +from ..exceptions import RequestEntityTooLarge +from ..http import parse_options_header + + +class Event: + pass + + +@dataclass(frozen=True) +class Preamble(Event): + data: bytes + + +@dataclass(frozen=True) +class Field(Event): + name: str + headers: Headers + + +@dataclass(frozen=True) +class File(Event): + name: str + filename: str + headers: Headers + + +@dataclass(frozen=True) +class Data(Event): + data: bytes + more_data: bool + + +@dataclass(frozen=True) +class Epilogue(Event): + data: bytes + + +class NeedData(Event): + pass + + +NEED_DATA = NeedData() + + +class State(Enum): + PREAMBLE = auto() + PART = auto() + DATA = auto() + EPILOGUE = auto() + COMPLETE = auto() + + +# Multipart line breaks MUST be CRLF (\r\n) by RFC-7578, except that +# many implementations break this and either use CR or LF alone. 
+LINE_BREAK = b"(?:\r\n|\n|\r)" +BLANK_LINE_RE = re.compile(b"(?:\r\n\r\n|\r\r|\n\n)", re.MULTILINE) +LINE_BREAK_RE = re.compile(LINE_BREAK, re.MULTILINE) +# Header values can be continued via a space or tab after the linebreak, as +# per RFC2231 +HEADER_CONTINUATION_RE = re.compile(b"%s[ \t]" % LINE_BREAK, re.MULTILINE) + + +class MultipartDecoder: + """Decodes a multipart message as bytes into Python events. + + The part data is returned as available to allow the caller to save + the data from memory to disk, if desired. + """ + + def __init__( + self, + boundary: bytes, + max_form_memory_size: Optional[int] = None, + ) -> None: + self.buffer = bytearray() + self.complete = False + self.max_form_memory_size = max_form_memory_size + self.state = State.PREAMBLE + self.boundary = boundary + + # Note in the below \h i.e. horizontal whitespace is used + # as [^\S\n\r] as \h isn't supported in python. + + # The preamble must end with a boundary where the boundary is + # prefixed by a line break, RFC2046. Except that many + # implementations including Werkzeug's tests omit the line + # break prefix. In addition the first boundary could be the + # epilogue boundary (for empty form-data) hence the matching + # group to understand if it is an epilogue boundary. + self.preamble_re = re.compile( + br"%s?--%s(--[^\S\n\r]*%s?|[^\S\n\r]*%s)" + % (LINE_BREAK, re.escape(boundary), LINE_BREAK, LINE_BREAK), + re.MULTILINE, + ) + # A boundary must include a line break prefix and suffix, and + # may include trailing whitespace. In addition the boundary + # could be the epilogue boundary hence the matching group to + # understand if it is an epilogue boundary. + self.boundary_re = re.compile( + br"%s--%s(--[^\S\n\r]*%s?|[^\S\n\r]*%s)" + % (LINE_BREAK, re.escape(boundary), LINE_BREAK, LINE_BREAK), + re.MULTILINE, + ) + + def last_newline(self) -> int: + try: + last_nl = self.buffer.rindex(b"\n") + except ValueError: + last_nl = len(self.buffer) + try: + last_cr = self.buffer.rindex(b"\r") + except ValueError: + last_cr = len(self.buffer) + + return min(last_nl, last_cr) + + def receive_data(self, data: Optional[bytes]) -> None: + if data is None: + self.complete = True + elif ( + self.max_form_memory_size is not None + and len(self.buffer) + len(data) > self.max_form_memory_size + ): + raise RequestEntityTooLarge() + else: + self.buffer.extend(data) + + def next_event(self) -> Event: + event: Event = NEED_DATA + + if self.state == State.PREAMBLE: + match = self.preamble_re.search(self.buffer) + if match is not None: + if match.group(1).startswith(b"--"): + self.state = State.EPILOGUE + else: + self.state = State.PART + data = bytes(self.buffer[: match.start()]) + del self.buffer[: match.end()] + event = Preamble(data=data) + + elif self.state == State.PART: + match = BLANK_LINE_RE.search(self.buffer) + if match is not None: + headers = self._parse_headers(self.buffer[: match.start()]) + del self.buffer[: match.end()] + + if "content-disposition" not in headers: + raise ValueError("Missing Content-Disposition header") + + disposition, extra = parse_options_header( + headers["content-disposition"] + ) + name = cast(str, extra.get("name")) + filename = extra.get("filename") + if filename is not None: + event = File( + filename=filename, + headers=headers, + name=name, + ) + else: + event = Field( + headers=headers, + name=name, + ) + self.state = State.DATA + + elif self.state == State.DATA: + if self.buffer.find(b"--" + self.boundary) == -1: + # No complete boundary in the buffer, but there may be + # a partial 
boundary at the end. As the boundary + # starts with either a nl or cr find the earliest and + # return up to that as data. + data_length = del_index = self.last_newline() + more_data = True + else: + match = self.boundary_re.search(self.buffer) + if match is not None: + if match.group(1).startswith(b"--"): + self.state = State.EPILOGUE + else: + self.state = State.PART + data_length = match.start() + del_index = match.end() + else: + data_length = del_index = self.last_newline() + more_data = match is None + + data = bytes(self.buffer[:data_length]) + del self.buffer[:del_index] + if data or not more_data: + event = Data(data=data, more_data=more_data) + + elif self.state == State.EPILOGUE and self.complete: + event = Epilogue(data=bytes(self.buffer)) + del self.buffer[:] + self.state = State.COMPLETE + + if self.complete and isinstance(event, NeedData): + raise ValueError(f"Invalid form-data cannot parse beyond {self.state}") + + return event + + def _parse_headers(self, data: bytes) -> Headers: + headers: List[Tuple[str, str]] = [] + # Merge the continued headers into one line + data = HEADER_CONTINUATION_RE.sub(b" ", data) + # Now there is one header per line + for line in data.splitlines(): + if line.strip() != b"": + name, value = _to_str(line).strip().split(":", 1) + headers.append((name.strip(), value.strip())) + return Headers(headers) + + +class MultipartEncoder: + def __init__(self, boundary: bytes) -> None: + self.boundary = boundary + self.state = State.PREAMBLE + + def send_event(self, event: Event) -> bytes: + if isinstance(event, Preamble) and self.state == State.PREAMBLE: + self.state = State.PART + return event.data + elif isinstance(event, (Field, File)) and self.state in { + State.PREAMBLE, + State.PART, + State.DATA, + }: + self.state = State.DATA + data = b"\r\n--" + self.boundary + b"\r\n" + data += b'Content-Disposition: form-data; name="%s"' % _to_bytes(event.name) + if isinstance(event, File): + data += b'; filename="%s"' % _to_bytes(event.filename) + data += b"\r\n" + for name, value in cast(Field, event).headers: + if name.lower() != "content-disposition": + data += _to_bytes(f"{name}: {value}\r\n") + data += b"\r\n" + return data + elif isinstance(event, Data) and self.state == State.DATA: + return event.data + elif isinstance(event, Epilogue): + self.state = State.COMPLETE + return b"\r\n--" + self.boundary + b"--\r\n" + event.data + else: + raise ValueError(f"Cannot generate {event} in state: {self.state}") diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/request.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/request.py new file mode 100644 index 0000000..2c21a21 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/sansio/request.py @@ -0,0 +1,548 @@ +import typing as t +from datetime import datetime + +from .._internal import _to_str +from ..datastructures import Accept +from ..datastructures import Authorization +from ..datastructures import CharsetAccept +from ..datastructures import ETags +from ..datastructures import Headers +from ..datastructures import HeaderSet +from ..datastructures import IfRange +from ..datastructures import ImmutableList +from ..datastructures import ImmutableMultiDict +from ..datastructures import LanguageAccept +from ..datastructures import MIMEAccept +from ..datastructures import MultiDict +from ..datastructures import Range +from ..datastructures import RequestCacheControl +from ..http import parse_accept_header +from ..http import parse_authorization_header +from ..http import 
parse_cache_control_header +from ..http import parse_cookie +from ..http import parse_date +from ..http import parse_etags +from ..http import parse_if_range_header +from ..http import parse_list_header +from ..http import parse_options_header +from ..http import parse_range_header +from ..http import parse_set_header +from ..urls import url_decode +from ..user_agent import UserAgent +from ..useragents import _UserAgent as _DeprecatedUserAgent +from ..utils import cached_property +from ..utils import header_property +from .utils import get_current_url +from .utils import get_host + + +class Request: + """Represents the non-IO parts of a HTTP request, including the + method, URL info, and headers. + + This class is not meant for general use. It should only be used when + implementing WSGI, ASGI, or another HTTP application spec. Werkzeug + provides a WSGI implementation at :cls:`werkzeug.wrappers.Request`. + + :param method: The method the request was made with, such as + ``GET``. + :param scheme: The URL scheme of the protocol the request used, such + as ``https`` or ``wss``. + :param server: The address of the server. ``(host, port)``, + ``(path, None)`` for unix sockets, or ``None`` if not known. + :param root_path: The prefix that the application is mounted under. + This is prepended to generated URLs, but is not part of route + matching. + :param path: The path part of the URL after ``root_path``. + :param query_string: The part of the URL after the "?". + :param headers: The headers received with the request. + :param remote_addr: The address of the client sending the request. + + .. versionadded:: 2.0 + """ + + #: The charset used to decode most data in the request. + charset = "utf-8" + + #: the error handling procedure for errors, defaults to 'replace' + encoding_errors = "replace" + + #: the class to use for `args` and `form`. The default is an + #: :class:`~werkzeug.datastructures.ImmutableMultiDict` which supports + #: multiple values per key. alternatively it makes sense to use an + #: :class:`~werkzeug.datastructures.ImmutableOrderedMultiDict` which + #: preserves order or a :class:`~werkzeug.datastructures.ImmutableDict` + #: which is the fastest but only remembers the last key. It is also + #: possible to use mutable structures, but this is not recommended. + #: + #: .. versionadded:: 0.6 + parameter_storage_class: t.Type[MultiDict] = ImmutableMultiDict + + #: The type to be used for dict values from the incoming WSGI + #: environment. (For example for :attr:`cookies`.) By default an + #: :class:`~werkzeug.datastructures.ImmutableMultiDict` is used. + #: + #: .. versionchanged:: 1.0.0 + #: Changed to ``ImmutableMultiDict`` to support multiple values. + #: + #: .. versionadded:: 0.6 + dict_storage_class: t.Type[MultiDict] = ImmutableMultiDict + + #: the type to be used for list values from the incoming WSGI environment. + #: By default an :class:`~werkzeug.datastructures.ImmutableList` is used + #: (for example for :attr:`access_list`). + #: + #: .. versionadded:: 0.6 + list_storage_class: t.Type[t.List] = ImmutableList + + user_agent_class = _DeprecatedUserAgent + """The class used and returned by the :attr:`user_agent` property to + parse the header. Defaults to + :class:`~werkzeug.user_agent.UserAgent`, which does no parsing. An + extension can provide a subclass that uses a parser to provide other + data. + + .. versionadded:: 2.0 + """ + + #: Valid host names when handling requests. 
By default all hosts are + #: trusted, which means that whatever the client says the host is + #: will be accepted. + #: + #: Because ``Host`` and ``X-Forwarded-Host`` headers can be set to + #: any value by a malicious client, it is recommended to either set + #: this property or implement similar validation in the proxy (if + #: the application is being run behind one). + #: + #: .. versionadded:: 0.9 + trusted_hosts: t.Optional[t.List[str]] = None + + def __init__( + self, + method: str, + scheme: str, + server: t.Optional[t.Tuple[str, t.Optional[int]]], + root_path: str, + path: str, + query_string: bytes, + headers: Headers, + remote_addr: t.Optional[str], + ) -> None: + #: The method the request was made with, such as ``GET``. + self.method = method.upper() + #: The URL scheme of the protocol the request used, such as + #: ``https`` or ``wss``. + self.scheme = scheme + #: The address of the server. ``(host, port)``, ``(path, None)`` + #: for unix sockets, or ``None`` if not known. + self.server = server + #: The prefix that the application is mounted under, without a + #: trailing slash. :attr:`path` comes after this. + self.root_path = root_path.rstrip("/") + #: The path part of the URL after :attr:`root_path`. This is the + #: path used for routing within the application. + self.path = "/" + path.lstrip("/") + #: The part of the URL after the "?". This is the raw value, use + #: :attr:`args` for the parsed values. + self.query_string = query_string + #: The headers received with the request. + self.headers = headers + #: The address of the client sending the request. + self.remote_addr = remote_addr + + def __repr__(self) -> str: + try: + url = self.url + except Exception as e: + url = f"(invalid URL: {e})" + + return f"<{type(self).__name__} {url!r} [{self.method}]>" + + @property + def url_charset(self) -> str: + """The charset that is assumed for URLs. Defaults to the value + of :attr:`charset`. + + .. versionadded:: 0.6 + """ + return self.charset + + @cached_property + def args(self) -> "MultiDict[str, str]": + """The parsed URL parameters (the part in the URL after the question + mark). + + By default an + :class:`~werkzeug.datastructures.ImmutableMultiDict` + is returned from this function. This can be changed by setting + :attr:`parameter_storage_class` to a different type. This might + be necessary if the order of the form data is important. + """ + return url_decode( + self.query_string, + self.url_charset, + errors=self.encoding_errors, + cls=self.parameter_storage_class, + ) + + @cached_property + def access_route(self) -> t.List[str]: + """If a forwarded header exists this is a list of all ip addresses + from the client ip to the last proxy server. + """ + if "X-Forwarded-For" in self.headers: + return self.list_storage_class( + parse_list_header(self.headers["X-Forwarded-For"]) + ) + elif self.remote_addr is not None: + return self.list_storage_class([self.remote_addr]) + return self.list_storage_class() + + @cached_property + def full_path(self) -> str: + """Requested path, including the query string.""" + return f"{self.path}?{_to_str(self.query_string, self.url_charset)}" + + @property + def is_secure(self) -> bool: + """``True`` if the request was made with a secure protocol + (HTTPS or WSS). 
+ """ + return self.scheme in {"https", "wss"} + + @cached_property + def url(self) -> str: + """The full request URL with the scheme, host, root path, path, + and query string.""" + return get_current_url( + self.scheme, self.host, self.root_path, self.path, self.query_string + ) + + @cached_property + def base_url(self) -> str: + """Like :attr:`url` but without the query string.""" + return get_current_url(self.scheme, self.host, self.root_path, self.path) + + @cached_property + def root_url(self) -> str: + """The request URL scheme, host, and root path. This is the root + that the application is accessed from. + """ + return get_current_url(self.scheme, self.host, self.root_path) + + @cached_property + def host_url(self) -> str: + """The request URL scheme and host only.""" + return get_current_url(self.scheme, self.host) + + @cached_property + def host(self) -> str: + """The host name the request was made to, including the port if + it's non-standard. Validated with :attr:`trusted_hosts`. + """ + return get_host( + self.scheme, self.headers.get("host"), self.server, self.trusted_hosts + ) + + @cached_property + def cookies(self) -> "ImmutableMultiDict[str, str]": + """A :class:`dict` with the contents of all cookies transmitted with + the request.""" + wsgi_combined_cookie = ";".join(self.headers.getlist("Cookie")) + return parse_cookie( # type: ignore + wsgi_combined_cookie, + self.charset, + self.encoding_errors, + cls=self.dict_storage_class, + ) + + # Common Descriptors + + content_type = header_property[str]( + "Content-Type", + doc="""The Content-Type entity-header field indicates the media + type of the entity-body sent to the recipient or, in the case of + the HEAD method, the media type that would have been sent had + the request been a GET.""", + read_only=True, + ) + + @cached_property + def content_length(self) -> t.Optional[int]: + """The Content-Length entity-header field indicates the size of the + entity-body in bytes or, in the case of the HEAD method, the size of + the entity-body that would have been sent had the request been a + GET. + """ + if self.headers.get("Transfer-Encoding", "") == "chunked": + return None + + content_length = self.headers.get("Content-Length") + if content_length is not None: + try: + return max(0, int(content_length)) + except (ValueError, TypeError): + pass + + return None + + content_encoding = header_property[str]( + "Content-Encoding", + doc="""The Content-Encoding entity-header field is used as a + modifier to the media-type. When present, its value indicates + what additional content codings have been applied to the + entity-body, and thus what decoding mechanisms must be applied + in order to obtain the media-type referenced by the Content-Type + header field. + + .. versionadded:: 0.9""", + read_only=True, + ) + content_md5 = header_property[str]( + "Content-MD5", + doc="""The Content-MD5 entity-header field, as defined in + RFC 1864, is an MD5 digest of the entity-body for the purpose of + providing an end-to-end message integrity check (MIC) of the + entity-body. (Note: a MIC is good for detecting accidental + modification of the entity-body in transit, but is not proof + against malicious attacks.) + + .. 
versionadded:: 0.9""", + read_only=True, + ) + referrer = header_property[str]( + "Referer", + doc="""The Referer[sic] request-header field allows the client + to specify, for the server's benefit, the address (URI) of the + resource from which the Request-URI was obtained (the + "referrer", although the header field is misspelled).""", + read_only=True, + ) + date = header_property( + "Date", + None, + parse_date, + doc="""The Date general-header field represents the date and + time at which the message was originated, having the same + semantics as orig-date in RFC 822. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """, + read_only=True, + ) + max_forwards = header_property( + "Max-Forwards", + None, + int, + doc="""The Max-Forwards request-header field provides a + mechanism with the TRACE and OPTIONS methods to limit the number + of proxies or gateways that can forward the request to the next + inbound server.""", + read_only=True, + ) + + def _parse_content_type(self) -> None: + if not hasattr(self, "_parsed_content_type"): + self._parsed_content_type = parse_options_header( + self.headers.get("Content-Type", "") + ) + + @property + def mimetype(self) -> str: + """Like :attr:`content_type`, but without parameters (eg, without + charset, type etc.) and always lowercase. For example if the content + type is ``text/HTML; charset=utf-8`` the mimetype would be + ``'text/html'``. + """ + self._parse_content_type() + return self._parsed_content_type[0].lower() + + @property + def mimetype_params(self) -> t.Dict[str, str]: + """The mimetype parameters as dict. For example if the content + type is ``text/html; charset=utf-8`` the params would be + ``{'charset': 'utf-8'}``. + """ + self._parse_content_type() + return self._parsed_content_type[1] + + @cached_property + def pragma(self) -> HeaderSet: + """The Pragma general-header field is used to include + implementation-specific directives that might apply to any recipient + along the request/response chain. All pragma directives specify + optional behavior from the viewpoint of the protocol; however, some + systems MAY require that behavior be consistent with the directives. + """ + return parse_set_header(self.headers.get("Pragma", "")) + + # Accept + + @cached_property + def accept_mimetypes(self) -> MIMEAccept: + """List of mimetypes this client supports as + :class:`~werkzeug.datastructures.MIMEAccept` object. + """ + return parse_accept_header(self.headers.get("Accept"), MIMEAccept) + + @cached_property + def accept_charsets(self) -> CharsetAccept: + """List of charsets this client supports as + :class:`~werkzeug.datastructures.CharsetAccept` object. + """ + return parse_accept_header(self.headers.get("Accept-Charset"), CharsetAccept) + + @cached_property + def accept_encodings(self) -> Accept: + """List of encodings this client accepts. Encodings in a HTTP term + are compression encodings such as gzip. For charsets have a look at + :attr:`accept_charset`. + """ + return parse_accept_header(self.headers.get("Accept-Encoding")) + + @cached_property + def accept_languages(self) -> LanguageAccept: + """List of languages this client accepts as + :class:`~werkzeug.datastructures.LanguageAccept` object. + + .. versionchanged 0.5 + In previous versions this was a regular + :class:`~werkzeug.datastructures.Accept` object. 
+ """ + return parse_accept_header(self.headers.get("Accept-Language"), LanguageAccept) + + # ETag + + @cached_property + def cache_control(self) -> RequestCacheControl: + """A :class:`~werkzeug.datastructures.RequestCacheControl` object + for the incoming cache control headers. + """ + cache_control = self.headers.get("Cache-Control") + return parse_cache_control_header(cache_control, None, RequestCacheControl) + + @cached_property + def if_match(self) -> ETags: + """An object containing all the etags in the `If-Match` header. + + :rtype: :class:`~werkzeug.datastructures.ETags` + """ + return parse_etags(self.headers.get("If-Match")) + + @cached_property + def if_none_match(self) -> ETags: + """An object containing all the etags in the `If-None-Match` header. + + :rtype: :class:`~werkzeug.datastructures.ETags` + """ + return parse_etags(self.headers.get("If-None-Match")) + + @cached_property + def if_modified_since(self) -> t.Optional[datetime]: + """The parsed `If-Modified-Since` header as a datetime object. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """ + return parse_date(self.headers.get("If-Modified-Since")) + + @cached_property + def if_unmodified_since(self) -> t.Optional[datetime]: + """The parsed `If-Unmodified-Since` header as a datetime object. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """ + return parse_date(self.headers.get("If-Unmodified-Since")) + + @cached_property + def if_range(self) -> IfRange: + """The parsed ``If-Range`` header. + + .. versionchanged:: 2.0 + ``IfRange.date`` is timezone-aware. + + .. versionadded:: 0.7 + """ + return parse_if_range_header(self.headers.get("If-Range")) + + @cached_property + def range(self) -> t.Optional[Range]: + """The parsed `Range` header. + + .. versionadded:: 0.7 + + :rtype: :class:`~werkzeug.datastructures.Range` + """ + return parse_range_header(self.headers.get("Range")) + + # User Agent + + @cached_property + def user_agent(self) -> UserAgent: + """The user agent. Use ``user_agent.string`` to get the header + value. Set :attr:`user_agent_class` to a subclass of + :class:`~werkzeug.user_agent.UserAgent` to provide parsing for + the other properties or other extended data. + + .. versionchanged:: 2.0 + The built in parser is deprecated and will be removed in + Werkzeug 2.1. A ``UserAgent`` subclass must be set to parse + data from the string. + """ + return self.user_agent_class(self.headers.get("User-Agent", "")) + + # Authorization + + @cached_property + def authorization(self) -> t.Optional[Authorization]: + """The `Authorization` object in parsed form.""" + return parse_authorization_header(self.headers.get("Authorization")) + + # CORS + + origin = header_property[str]( + "Origin", + doc=( + "The host that the request originated from. Set" + " :attr:`~CORSResponseMixin.access_control_allow_origin` on" + " the response to indicate which origins are allowed." + ), + read_only=True, + ) + + access_control_request_headers = header_property( + "Access-Control-Request-Headers", + load_func=parse_set_header, + doc=( + "Sent with a preflight request to indicate which headers" + " will be sent with the cross origin request. Set" + " :attr:`~CORSResponseMixin.access_control_allow_headers`" + " on the response to indicate which headers are allowed." + ), + read_only=True, + ) + + access_control_request_method = header_property[str]( + "Access-Control-Request-Method", + doc=( + "Sent with a preflight request to indicate which method" + " will be used for the cross origin request. 
Set" + " :attr:`~CORSResponseMixin.access_control_allow_methods`" + " on the response to indicate which methods are allowed." + ), + read_only=True, + ) + + @property + def is_json(self) -> bool: + """Check if the mimetype indicates JSON data, either + :mimetype:`application/json` or :mimetype:`application/*+json`. + """ + mt = self.mimetype + return ( + mt == "application/json" + or mt.startswith("application/") + and mt.endswith("+json") + ) diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/response.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/response.py new file mode 100644 index 0000000..82817e8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/sansio/response.py @@ -0,0 +1,704 @@ +import typing as t +from datetime import datetime +from datetime import timedelta +from datetime import timezone +from http import HTTPStatus + +from .._internal import _to_str +from ..datastructures import Headers +from ..datastructures import HeaderSet +from ..http import dump_cookie +from ..http import HTTP_STATUS_CODES +from ..utils import get_content_type +from werkzeug.datastructures import CallbackDict +from werkzeug.datastructures import ContentRange +from werkzeug.datastructures import ContentSecurityPolicy +from werkzeug.datastructures import ResponseCacheControl +from werkzeug.datastructures import WWWAuthenticate +from werkzeug.http import COEP +from werkzeug.http import COOP +from werkzeug.http import dump_age +from werkzeug.http import dump_header +from werkzeug.http import dump_options_header +from werkzeug.http import http_date +from werkzeug.http import parse_age +from werkzeug.http import parse_cache_control_header +from werkzeug.http import parse_content_range_header +from werkzeug.http import parse_csp_header +from werkzeug.http import parse_date +from werkzeug.http import parse_options_header +from werkzeug.http import parse_set_header +from werkzeug.http import parse_www_authenticate_header +from werkzeug.http import quote_etag +from werkzeug.http import unquote_etag +from werkzeug.utils import header_property + + +def _set_property(name: str, doc: t.Optional[str] = None) -> property: + def fget(self: "Response") -> HeaderSet: + def on_update(header_set: HeaderSet) -> None: + if not header_set and name in self.headers: + del self.headers[name] + elif header_set: + self.headers[name] = header_set.to_header() + + return parse_set_header(self.headers.get(name), on_update) + + def fset( + self: "Response", + value: t.Optional[ + t.Union[str, t.Dict[str, t.Union[str, int]], t.Iterable[str]] + ], + ) -> None: + if not value: + del self.headers[name] + elif isinstance(value, str): + self.headers[name] = value + else: + self.headers[name] = dump_header(value) + + return property(fget, fset, doc=doc) + + +class Response: + """Represents the non-IO parts of an HTTP response, specifically the + status and headers but not the body. + + This class is not meant for general use. It should only be used when + implementing WSGI, ASGI, or another HTTP application spec. Werkzeug + provides a WSGI implementation at :cls:`werkzeug.wrappers.Response`. + + :param status: The status code for the response. Either an int, in + which case the default status message is added, or a string in + the form ``{code} {message}``, like ``404 Not Found``. Defaults + to 200. + :param headers: A :class:`~werkzeug.datastructures.Headers` object, + or a list of ``(key, value)`` tuples that will be converted to a + ``Headers`` object. 
+ :param mimetype: The mime type (content type without charset or + other parameters) of the response. If the value starts with + ``text/`` (or matches some other special cases), the charset + will be added to create the ``content_type``. + :param content_type: The full content type of the response. + Overrides building the value from ``mimetype``. + + .. versionadded:: 2.0 + """ + + #: the charset of the response. + charset = "utf-8" + + #: the default status if none is provided. + default_status = 200 + + #: the default mimetype if none is provided. + default_mimetype = "text/plain" + + #: Warn if a cookie header exceeds this size. The default, 4093, should be + #: safely `supported by most browsers `_. A cookie larger than + #: this size will still be sent, but it may be ignored or handled + #: incorrectly by some browsers. Set to 0 to disable this check. + #: + #: .. versionadded:: 0.13 + #: + #: .. _`cookie`: http://browsercookielimits.squawky.net/ + max_cookie_size = 4093 + + # A :class:`Headers` object representing the response headers. + headers: Headers + + def __init__( + self, + status: t.Optional[t.Union[int, str, HTTPStatus]] = None, + headers: t.Optional[ + t.Union[ + t.Mapping[str, t.Union[str, int, t.Iterable[t.Union[str, int]]]], + t.Iterable[t.Tuple[str, t.Union[str, int]]], + ] + ] = None, + mimetype: t.Optional[str] = None, + content_type: t.Optional[str] = None, + ) -> None: + if isinstance(headers, Headers): + self.headers = headers + elif not headers: + self.headers = Headers() + else: + self.headers = Headers(headers) + + if content_type is None: + if mimetype is None and "content-type" not in self.headers: + mimetype = self.default_mimetype + if mimetype is not None: + mimetype = get_content_type(mimetype, self.charset) + content_type = mimetype + if content_type is not None: + self.headers["Content-Type"] = content_type + if status is None: + status = self.default_status + self.status = status # type: ignore + + def __repr__(self) -> str: + return f"<{type(self).__name__} [{self.status}]>" + + @property + def status_code(self) -> int: + """The HTTP status code as a number.""" + return self._status_code + + @status_code.setter + def status_code(self, code: int) -> None: + self.status = code # type: ignore + + @property + def status(self) -> str: + """The HTTP status code as a string.""" + return self._status + + @status.setter + def status(self, value: t.Union[str, int, HTTPStatus]) -> None: + if not isinstance(value, (str, bytes, int, HTTPStatus)): + raise TypeError("Invalid status argument") + + self._status, self._status_code = self._clean_status(value) + + def _clean_status(self, value: t.Union[str, int, HTTPStatus]) -> t.Tuple[str, int]: + if isinstance(value, HTTPStatus): + value = int(value) + status = _to_str(value, self.charset) + split_status = status.split(None, 1) + + if len(split_status) == 0: + raise ValueError("Empty status argument") + + if len(split_status) > 1: + if split_status[0].isdigit(): + # code and message + return status, int(split_status[0]) + + # multi-word message + return f"0 {status}", 0 + + if split_status[0].isdigit(): + # code only + status_code = int(split_status[0]) + + try: + status = f"{status_code} {HTTP_STATUS_CODES[status_code].upper()}" + except KeyError: + status = f"{status_code} UNKNOWN" + + return status, status_code + + # one-word message + return f"0 {status}", 0 + + def set_cookie( + self, + key: str, + value: str = "", + max_age: t.Optional[t.Union[timedelta, int]] = None, + expires: t.Optional[t.Union[str, 
datetime, int, float]] = None, + path: t.Optional[str] = "/", + domain: t.Optional[str] = None, + secure: bool = False, + httponly: bool = False, + samesite: t.Optional[str] = None, + ) -> None: + """Sets a cookie. + + A warning is raised if the size of the cookie header exceeds + :attr:`max_cookie_size`, but the header will still be set. + + :param key: the key (name) of the cookie to be set. + :param value: the value of the cookie. + :param max_age: should be a number of seconds, or `None` (default) if + the cookie should last only as long as the client's + browser session. + :param expires: should be a `datetime` object or UNIX timestamp. + :param path: limits the cookie to a given path, per default it will + span the whole domain. + :param domain: if you want to set a cross-domain cookie. For example, + ``domain=".example.com"`` will set a cookie that is + readable by the domain ``www.example.com``, + ``foo.example.com`` etc. Otherwise, a cookie will only + be readable by the domain that set it. + :param secure: If ``True``, the cookie will only be available + via HTTPS. + :param httponly: Disallow JavaScript access to the cookie. + :param samesite: Limit the scope of the cookie to only be + attached to requests that are "same-site". + """ + self.headers.add( + "Set-Cookie", + dump_cookie( + key, + value=value, + max_age=max_age, + expires=expires, + path=path, + domain=domain, + secure=secure, + httponly=httponly, + charset=self.charset, + max_size=self.max_cookie_size, + samesite=samesite, + ), + ) + + def delete_cookie( + self, + key: str, + path: str = "/", + domain: t.Optional[str] = None, + secure: bool = False, + httponly: bool = False, + samesite: t.Optional[str] = None, + ) -> None: + """Delete a cookie. Fails silently if key doesn't exist. + + :param key: the key (name) of the cookie to be deleted. + :param path: if the cookie that should be deleted was limited to a + path, the path has to be defined here. + :param domain: if the cookie that should be deleted was limited to a + domain, that domain has to be defined here. + :param secure: If ``True``, the cookie will only be available + via HTTPS. + :param httponly: Disallow JavaScript access to the cookie. + :param samesite: Limit the scope of the cookie to only be + attached to requests that are "same-site". + """ + self.set_cookie( + key, + expires=0, + max_age=0, + path=path, + domain=domain, + secure=secure, + httponly=httponly, + samesite=samesite, + ) + + @property + def is_json(self) -> bool: + """Check if the mimetype indicates JSON data, either + :mimetype:`application/json` or :mimetype:`application/*+json`. + """ + mt = self.mimetype + return mt is not None and ( + mt == "application/json" + or mt.startswith("application/") + and mt.endswith("+json") + ) + + # Common Descriptors + + @property + def mimetype(self) -> t.Optional[str]: + """The mimetype (content type without charset etc.)""" + ct = self.headers.get("content-type") + + if ct: + return ct.split(";")[0].strip() + else: + return None + + @mimetype.setter + def mimetype(self, value: str) -> None: + self.headers["Content-Type"] = get_content_type(value, self.charset) + + @property + def mimetype_params(self) -> t.Dict[str, str]: + """The mimetype parameters as dict. For example if the + content type is ``text/html; charset=utf-8`` the params would be + ``{'charset': 'utf-8'}``. + + .. 
versionadded:: 0.5 + """ + + def on_update(d: CallbackDict) -> None: + self.headers["Content-Type"] = dump_options_header(self.mimetype, d) + + d = parse_options_header(self.headers.get("content-type", ""))[1] + return CallbackDict(d, on_update) + + location = header_property[str]( + "Location", + doc="""The Location response-header field is used to redirect + the recipient to a location other than the Request-URI for + completion of the request or identification of a new + resource.""", + ) + age = header_property( + "Age", + None, + parse_age, + dump_age, # type: ignore + doc="""The Age response-header field conveys the sender's + estimate of the amount of time since the response (or its + revalidation) was generated at the origin server. + + Age values are non-negative decimal integers, representing time + in seconds.""", + ) + content_type = header_property[str]( + "Content-Type", + doc="""The Content-Type entity-header field indicates the media + type of the entity-body sent to the recipient or, in the case of + the HEAD method, the media type that would have been sent had + the request been a GET.""", + ) + content_length = header_property( + "Content-Length", + None, + int, + str, + doc="""The Content-Length entity-header field indicates the size + of the entity-body, in decimal number of OCTETs, sent to the + recipient or, in the case of the HEAD method, the size of the + entity-body that would have been sent had the request been a + GET.""", + ) + content_location = header_property[str]( + "Content-Location", + doc="""The Content-Location entity-header field MAY be used to + supply the resource location for the entity enclosed in the + message when that entity is accessible from a location separate + from the requested resource's URI.""", + ) + content_encoding = header_property[str]( + "Content-Encoding", + doc="""The Content-Encoding entity-header field is used as a + modifier to the media-type. When present, its value indicates + what additional content codings have been applied to the + entity-body, and thus what decoding mechanisms must be applied + in order to obtain the media-type referenced by the Content-Type + header field.""", + ) + content_md5 = header_property[str]( + "Content-MD5", + doc="""The Content-MD5 entity-header field, as defined in + RFC 1864, is an MD5 digest of the entity-body for the purpose of + providing an end-to-end message integrity check (MIC) of the + entity-body. (Note: a MIC is good for detecting accidental + modification of the entity-body in transit, but is not proof + against malicious attacks.)""", + ) + date = header_property( + "Date", + None, + parse_date, + http_date, + doc="""The Date general-header field represents the date and + time at which the message was originated, having the same + semantics as orig-date in RFC 822. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """, + ) + expires = header_property( + "Expires", + None, + parse_date, + http_date, + doc="""The Expires entity-header field gives the date/time after + which the response is considered stale. A stale cache entry may + not normally be returned by a cache. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """, + ) + last_modified = header_property( + "Last-Modified", + None, + parse_date, + http_date, + doc="""The Last-Modified entity-header field indicates the date + and time at which the origin server believes the variant was + last modified. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. 
+ """, + ) + + @property + def retry_after(self) -> t.Optional[datetime]: + """The Retry-After response-header field can be used with a + 503 (Service Unavailable) response to indicate how long the + service is expected to be unavailable to the requesting client. + + Time in seconds until expiration or date. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """ + value = self.headers.get("retry-after") + if value is None: + return None + elif value.isdigit(): + return datetime.now(timezone.utc) + timedelta(seconds=int(value)) + return parse_date(value) + + @retry_after.setter + def retry_after(self, value: t.Optional[t.Union[datetime, int, str]]) -> None: + if value is None: + if "retry-after" in self.headers: + del self.headers["retry-after"] + return + elif isinstance(value, datetime): + value = http_date(value) + else: + value = str(value) + self.headers["Retry-After"] = value + + vary = _set_property( + "Vary", + doc="""The Vary field value indicates the set of request-header + fields that fully determines, while the response is fresh, + whether a cache is permitted to use the response to reply to a + subsequent request without revalidation.""", + ) + content_language = _set_property( + "Content-Language", + doc="""The Content-Language entity-header field describes the + natural language(s) of the intended audience for the enclosed + entity. Note that this might not be equivalent to all the + languages used within the entity-body.""", + ) + allow = _set_property( + "Allow", + doc="""The Allow entity-header field lists the set of methods + supported by the resource identified by the Request-URI. The + purpose of this field is strictly to inform the recipient of + valid methods associated with the resource. An Allow header + field MUST be present in a 405 (Method Not Allowed) + response.""", + ) + + # ETag + + @property + def cache_control(self) -> ResponseCacheControl: + """The Cache-Control general-header field is used to specify + directives that MUST be obeyed by all caching mechanisms along the + request/response chain. + """ + + def on_update(cache_control: ResponseCacheControl) -> None: + if not cache_control and "cache-control" in self.headers: + del self.headers["cache-control"] + elif cache_control: + self.headers["Cache-Control"] = cache_control.to_header() + + return parse_cache_control_header( + self.headers.get("cache-control"), on_update, ResponseCacheControl + ) + + def set_etag(self, etag: str, weak: bool = False) -> None: + """Set the etag, and override the old one if there was one.""" + self.headers["ETag"] = quote_etag(etag, weak) + + def get_etag(self) -> t.Union[t.Tuple[str, bool], t.Tuple[None, None]]: + """Return a tuple in the form ``(etag, is_weak)``. If there is no + ETag the return value is ``(None, None)``. + """ + return unquote_etag(self.headers.get("ETag")) + + accept_ranges = header_property[str]( + "Accept-Ranges", + doc="""The `Accept-Ranges` header. Even though the name would + indicate that multiple values are supported, it must be one + string token only. + + The values ``'bytes'`` and ``'none'`` are common. + + .. versionadded:: 0.7""", + ) + + @property + def content_range(self) -> ContentRange: + """The ``Content-Range`` header as a + :class:`~werkzeug.datastructures.ContentRange` object. Available + even if the header is not set. + + .. 
versionadded:: 0.7 + """ + + def on_update(rng: ContentRange) -> None: + if not rng: + del self.headers["content-range"] + else: + self.headers["Content-Range"] = rng.to_header() + + rv = parse_content_range_header(self.headers.get("content-range"), on_update) + # always provide a content range object to make the descriptor + # more user friendly. It provides an unset() method that can be + # used to remove the header quickly. + if rv is None: + rv = ContentRange(None, None, None, on_update=on_update) + return rv + + @content_range.setter + def content_range(self, value: t.Optional[t.Union[ContentRange, str]]) -> None: + if not value: + del self.headers["content-range"] + elif isinstance(value, str): + self.headers["Content-Range"] = value + else: + self.headers["Content-Range"] = value.to_header() + + # Authorization + + @property + def www_authenticate(self) -> WWWAuthenticate: + """The ``WWW-Authenticate`` header in a parsed form.""" + + def on_update(www_auth: WWWAuthenticate) -> None: + if not www_auth and "www-authenticate" in self.headers: + del self.headers["www-authenticate"] + elif www_auth: + self.headers["WWW-Authenticate"] = www_auth.to_header() + + header = self.headers.get("www-authenticate") + return parse_www_authenticate_header(header, on_update) + + # CSP + + @property + def content_security_policy(self) -> ContentSecurityPolicy: + """The ``Content-Security-Policy`` header as a + :class:`~werkzeug.datastructures.ContentSecurityPolicy` object. Available + even if the header is not set. + + The Content-Security-Policy header adds an additional layer of + security to help detect and mitigate certain types of attacks. + """ + + def on_update(csp: ContentSecurityPolicy) -> None: + if not csp: + del self.headers["content-security-policy"] + else: + self.headers["Content-Security-Policy"] = csp.to_header() + + rv = parse_csp_header(self.headers.get("content-security-policy"), on_update) + if rv is None: + rv = ContentSecurityPolicy(None, on_update=on_update) + return rv + + @content_security_policy.setter + def content_security_policy( + self, value: t.Optional[t.Union[ContentSecurityPolicy, str]] + ) -> None: + if not value: + del self.headers["content-security-policy"] + elif isinstance(value, str): + self.headers["Content-Security-Policy"] = value + else: + self.headers["Content-Security-Policy"] = value.to_header() + + @property + def content_security_policy_report_only(self) -> ContentSecurityPolicy: + """The ``Content-Security-policy-report-only`` header as a + :class:`~werkzeug.datastructures.ContentSecurityPolicy` object. Available + even if the header is not set. + + The Content-Security-Policy-Report-Only header adds a csp policy + that is not enforced but is reported thereby helping detect + certain types of attacks. 
+ """ + + def on_update(csp: ContentSecurityPolicy) -> None: + if not csp: + del self.headers["content-security-policy-report-only"] + else: + self.headers["Content-Security-policy-report-only"] = csp.to_header() + + rv = parse_csp_header( + self.headers.get("content-security-policy-report-only"), on_update + ) + if rv is None: + rv = ContentSecurityPolicy(None, on_update=on_update) + return rv + + @content_security_policy_report_only.setter + def content_security_policy_report_only( + self, value: t.Optional[t.Union[ContentSecurityPolicy, str]] + ) -> None: + if not value: + del self.headers["content-security-policy-report-only"] + elif isinstance(value, str): + self.headers["Content-Security-policy-report-only"] = value + else: + self.headers["Content-Security-policy-report-only"] = value.to_header() + + # CORS + + @property + def access_control_allow_credentials(self) -> bool: + """Whether credentials can be shared by the browser to + JavaScript code. As part of the preflight request it indicates + whether credentials can be used on the cross origin request. + """ + return "Access-Control-Allow-Credentials" in self.headers + + @access_control_allow_credentials.setter + def access_control_allow_credentials(self, value: t.Optional[bool]) -> None: + if value is True: + self.headers["Access-Control-Allow-Credentials"] = "true" + else: + self.headers.pop("Access-Control-Allow-Credentials", None) + + access_control_allow_headers = header_property( + "Access-Control-Allow-Headers", + load_func=parse_set_header, + dump_func=dump_header, + doc="Which headers can be sent with the cross origin request.", + ) + + access_control_allow_methods = header_property( + "Access-Control-Allow-Methods", + load_func=parse_set_header, + dump_func=dump_header, + doc="Which methods can be used for the cross origin request.", + ) + + access_control_allow_origin = header_property[str]( + "Access-Control-Allow-Origin", + doc="The origin or '*' for any origin that may make cross origin requests.", + ) + + access_control_expose_headers = header_property( + "Access-Control-Expose-Headers", + load_func=parse_set_header, + dump_func=dump_header, + doc="Which headers can be shared by the browser to JavaScript code.", + ) + + access_control_max_age = header_property( + "Access-Control-Max-Age", + load_func=int, + dump_func=str, + doc="The maximum age in seconds the access control settings can be cached for.", + ) + + cross_origin_opener_policy = header_property[COOP]( + "Cross-Origin-Opener-Policy", + load_func=lambda value: COOP(value), + dump_func=lambda value: value.value, + default=COOP.UNSAFE_NONE, + doc="""Allows control over sharing of browsing context group with cross-origin + documents. Values must be a member of the :class:`werkzeug.http.COOP` enum.""", + ) + + cross_origin_embedder_policy = header_property[COEP]( + "Cross-Origin-Embedder-Policy", + load_func=lambda value: COEP(value), + dump_func=lambda value: value.value, + default=COEP.UNSAFE_NONE, + doc="""Prevents a document from loading any cross-origin resources that do not + explicitly grant the document permission. 
Values must be a member of the + :class:`werkzeug.http.COEP` enum.""", + ) diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/utils.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/utils.py new file mode 100644 index 0000000..1b4d892 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/sansio/utils.py @@ -0,0 +1,142 @@ +import typing as t + +from .._internal import _encode_idna +from ..exceptions import SecurityError +from ..urls import uri_to_iri +from ..urls import url_quote + + +def host_is_trusted(hostname: str, trusted_list: t.Iterable[str]) -> bool: + """Check if a host matches a list of trusted names. + + :param hostname: The name to check. + :param trusted_list: A list of valid names to match. If a name + starts with a dot it will match all subdomains. + + .. versionadded:: 0.9 + """ + if not hostname: + return False + + if isinstance(trusted_list, str): + trusted_list = [trusted_list] + + def _normalize(hostname: str) -> bytes: + if ":" in hostname: + hostname = hostname.rsplit(":", 1)[0] + + return _encode_idna(hostname) + + try: + hostname_bytes = _normalize(hostname) + except UnicodeError: + return False + + for ref in trusted_list: + if ref.startswith("."): + ref = ref[1:] + suffix_match = True + else: + suffix_match = False + + try: + ref_bytes = _normalize(ref) + except UnicodeError: + return False + + if ref_bytes == hostname_bytes: + return True + + if suffix_match and hostname_bytes.endswith(b"." + ref_bytes): + return True + + return False + + +def get_host( + scheme: str, + host_header: t.Optional[str], + server: t.Optional[t.Tuple[str, t.Optional[int]]] = None, + trusted_hosts: t.Optional[t.Iterable[str]] = None, +) -> str: + """Return the host for the given parameters. + + This first checks the ``host_header``. If it's not present, then + ``server`` is used. The host will only contain the port if it is + different than the standard port for the protocol. + + Optionally, verify that the host is trusted using + :func:`host_is_trusted` and raise a + :exc:`~werkzeug.exceptions.SecurityError` if it is not. + + :param scheme: The protocol the request used, like ``"https"``. + :param host_header: The ``Host`` header value. + :param server: Address of the server. ``(host, port)``, or + ``(path, None)`` for unix sockets. + :param trusted_hosts: A list of trusted host names. + + :return: Host, with port if necessary. + :raise ~werkzeug.exceptions.SecurityError: If the host is not + trusted. + """ + host = "" + + if host_header is not None: + host = host_header + elif server is not None: + host = server[0] + + if server[1] is not None: + host = f"{host}:{server[1]}" + + if scheme in {"http", "ws"} and host.endswith(":80"): + host = host[:-3] + elif scheme in {"https", "wss"} and host.endswith(":443"): + host = host[:-4] + + if trusted_hosts is not None: + if not host_is_trusted(host, trusted_hosts): + raise SecurityError(f"Host {host!r} is not trusted.") + + return host + + +def get_current_url( + scheme: str, + host: str, + root_path: t.Optional[str] = None, + path: t.Optional[str] = None, + query_string: t.Optional[bytes] = None, +) -> str: + """Recreate the URL for a request. If an optional part isn't + provided, it and subsequent parts are not included in the URL. + + The URL is an IRI, not a URI, so it may contain Unicode characters. + Use :func:`~werkzeug.urls.iri_to_uri` to convert it to ASCII. + + :param scheme: The protocol the request used, like ``"https"``. + :param host: The host the request was made to. See :func:`get_host`. 
+ :param root_path: Prefix that the application is mounted under. This + is prepended to ``path``. + :param path: The path part of the URL after ``root_path``. + :param query_string: The portion of the URL after the "?". + """ + url = [scheme, "://", host] + + if root_path is None: + url.append("/") + return uri_to_iri("".join(url)) + + url.append(url_quote(root_path.rstrip("/"))) + url.append("/") + + if path is None: + return uri_to_iri("".join(url)) + + url.append(url_quote(path.lstrip("/"))) + + if query_string: + url.append("?") + url.append(url_quote(query_string, safe=":&%=+$!*'(),")) + + return uri_to_iri("".join(url)) diff --git a/venv/lib/python3.8/site-packages/werkzeug/security.py b/venv/lib/python3.8/site-packages/werkzeug/security.py new file mode 100644 index 0000000..e23040a --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/security.py @@ -0,0 +1,247 @@ +import hashlib +import hmac +import os +import posixpath +import secrets +import typing as t +import warnings + +if t.TYPE_CHECKING: + pass + +SALT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" +DEFAULT_PBKDF2_ITERATIONS = 260000 + +_os_alt_seps: t.List[str] = list( + sep for sep in [os.path.sep, os.path.altsep] if sep is not None and sep != "/" +) + + +def pbkdf2_hex( + data: t.Union[str, bytes], + salt: t.Union[str, bytes], + iterations: int = DEFAULT_PBKDF2_ITERATIONS, + keylen: t.Optional[int] = None, + hashfunc: t.Optional[t.Union[str, t.Callable]] = None, +) -> str: + """Like :func:`pbkdf2_bin`, but returns a hex-encoded string. + + :param data: the data to derive. + :param salt: the salt for the derivation. + :param iterations: the number of iterations. + :param keylen: the length of the resulting key. If not provided, + the digest size will be used. + :param hashfunc: the hash function to use. This can either be the + string name of a known hash function, or a function + from the hashlib module. Defaults to sha256. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Use :func:`hashlib.pbkdf2_hmac` + instead. + + .. versionadded:: 0.9 + """ + warnings.warn( + "'pbkdf2_hex' is deprecated and will be removed in Werkzeug" + " 2.1. Use 'hashlib.pbkdf2_hmac().hex()' instead.", + DeprecationWarning, + stacklevel=2, + ) + return pbkdf2_bin(data, salt, iterations, keylen, hashfunc).hex() + + +def pbkdf2_bin( + data: t.Union[str, bytes], + salt: t.Union[str, bytes], + iterations: int = DEFAULT_PBKDF2_ITERATIONS, + keylen: t.Optional[int] = None, + hashfunc: t.Optional[t.Union[str, t.Callable]] = None, +) -> bytes: + """Returns a binary digest for the PBKDF2 hash algorithm of `data` + with the given `salt`. It iterates `iterations` times and produces a + key of `keylen` bytes. By default, SHA-256 is used as hash function; + a different hashlib `hashfunc` can be provided. + + :param data: the data to derive. + :param salt: the salt for the derivation. + :param iterations: the number of iterations. + :param keylen: the length of the resulting key. If not provided + the digest size will be used. + :param hashfunc: the hash function to use. This can either be the + string name of a known hash function or a function + from the hashlib module. Defaults to sha256. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Use :func:`hashlib.pbkdf2_hmac` + instead. + + .. versionadded:: 0.9 + """ + warnings.warn( + "'pbkdf2_bin' is deprecated and will be removed in Werkzeug" + " 2.1. 
Use 'hashlib.pbkdf2_hmac()' instead.", + DeprecationWarning, + stacklevel=2, + ) + + if isinstance(data, str): + data = data.encode("utf8") + + if isinstance(salt, str): + salt = salt.encode("utf8") + + if not hashfunc: + hash_name = "sha256" + elif callable(hashfunc): + hash_name = hashfunc().name + else: + hash_name = hashfunc + + return hashlib.pbkdf2_hmac(hash_name, data, salt, iterations, keylen) + + +def safe_str_cmp(a: str, b: str) -> bool: + """This function compares strings in somewhat constant time. This + requires that the length of at least one string is known in advance. + + Returns `True` if the two strings are equal, or `False` if they are not. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Use + :func:`hmac.compare_digest` instead. + + .. versionadded:: 0.7 + """ + warnings.warn( + "'safe_str_cmp' is deprecated and will be removed in Werkzeug" + " 2.1. Use 'hmac.compare_digest' instead.", + DeprecationWarning, + stacklevel=2, + ) + + if isinstance(a, str): + a = a.encode("utf-8") # type: ignore + + if isinstance(b, str): + b = b.encode("utf-8") # type: ignore + + return hmac.compare_digest(a, b) + + +def gen_salt(length: int) -> str: + """Generate a random string of SALT_CHARS with specified ``length``.""" + if length <= 0: + raise ValueError("Salt length must be positive") + + return "".join(secrets.choice(SALT_CHARS) for _ in range(length)) + + +def _hash_internal(method: str, salt: str, password: str) -> t.Tuple[str, str]: + """Internal password hash helper. Supports plaintext without salt, + unsalted and salted passwords. In case salted passwords are used + hmac is used. + """ + if method == "plain": + return password, method + + salt = salt.encode("utf-8") + password = password.encode("utf-8") + + if method.startswith("pbkdf2:"): + if not salt: + raise ValueError("Salt is required for PBKDF2") + + args = method[7:].split(":") + + if len(args) not in (1, 2): + raise ValueError("Invalid number of arguments for PBKDF2") + + method = args.pop(0) + iterations = int(args[0] or 0) if args else DEFAULT_PBKDF2_ITERATIONS + return ( + hashlib.pbkdf2_hmac(method, password, salt, iterations).hex(), + f"pbkdf2:{method}:{iterations}", + ) + + if salt: + return hmac.new(salt, password, method).hexdigest(), method + + return hashlib.new(method, password).hexdigest(), method + + +def generate_password_hash( + password: str, method: str = "pbkdf2:sha256", salt_length: int = 16 +) -> str: + """Hash a password with the given method and salt with a string of + the given length. The format of the string returned includes the method + that was used so that :func:`check_password_hash` can check the hash. + + The format for the hashed string looks like this:: + + method$salt$hash + + This method can **not** generate unsalted passwords but it is possible + to set param method='plain' in order to enforce plaintext passwords. + If a salt is used, hmac is used internally to salt the password. + + If PBKDF2 is wanted it can be enabled by setting the method to + ``pbkdf2:method:iterations`` where iterations is optional:: + + pbkdf2:sha256:80000$salt$hash + pbkdf2:sha256$salt$hash + + :param password: the password to hash. + :param method: the hash method to use (one that hashlib supports). Can + optionally be in the format ``pbkdf2:method:iterations`` + to enable PBKDF2. + :param salt_length: the length of the salt in letters. 
+ """ + salt = gen_salt(salt_length) if method != "plain" else "" + h, actual_method = _hash_internal(method, salt, password) + return f"{actual_method}${salt}${h}" + + +def check_password_hash(pwhash: str, password: str) -> bool: + """Check a password against a given salted and hashed password value. + In order to support unsalted legacy passwords this method supports + plain text passwords, md5 and sha1 hashes (both salted and unsalted). + + Returns `True` if the password matched, `False` otherwise. + + :param pwhash: a hashed string like returned by + :func:`generate_password_hash`. + :param password: the plaintext password to compare against the hash. + """ + if pwhash.count("$") < 2: + return False + + method, salt, hashval = pwhash.split("$", 2) + return hmac.compare_digest(_hash_internal(method, salt, password)[0], hashval) + + +def safe_join(directory: str, *pathnames: str) -> t.Optional[str]: + """Safely join zero or more untrusted path components to a base + directory to avoid escaping the base directory. + + :param directory: The trusted base directory. + :param pathnames: The untrusted path components relative to the + base directory. + :return: A safe path, otherwise ``None``. + """ + parts = [directory] + + for filename in pathnames: + if filename != "": + filename = posixpath.normpath(filename) + + if ( + any(sep in filename for sep in _os_alt_seps) + or os.path.isabs(filename) + or filename == ".." + or filename.startswith("../") + ): + return None + + parts.append(filename) + + return posixpath.join(*parts) diff --git a/venv/lib/python3.8/site-packages/werkzeug/serving.py b/venv/lib/python3.8/site-packages/werkzeug/serving.py new file mode 100644 index 0000000..197b6fb --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/serving.py @@ -0,0 +1,1081 @@ +"""A WSGI and HTTP server for use **during development only**. This +server is convenient to use, but is not designed to be particularly +stable, secure, or efficient. Use a dedicate WSGI server and HTTP +server when deploying to production. + +It provides features like interactive debugging and code reloading. Use +``run_simple`` to start the server. Put this in a ``run.py`` script: + +.. 
code-block:: python + + from myapp import create_app + from werkzeug import run_simple +""" +import io +import os +import platform +import signal +import socket +import socketserver +import sys +import typing as t +import warnings +from datetime import datetime as dt +from datetime import timedelta +from datetime import timezone +from http.server import BaseHTTPRequestHandler +from http.server import HTTPServer + +from ._internal import _log +from ._internal import _wsgi_encoding_dance +from .exceptions import InternalServerError +from .urls import uri_to_iri +from .urls import url_parse +from .urls import url_unquote + +try: + import ssl +except ImportError: + + class _SslDummy: + def __getattr__(self, name: str) -> t.Any: + raise RuntimeError("SSL support unavailable") # noqa: B904 + + ssl = _SslDummy() # type: ignore + +_log_add_style = True + +if os.name == "nt": + try: + __import__("colorama") + except ImportError: + _log_add_style = False + +can_fork = hasattr(os, "fork") + +if can_fork: + ForkingMixIn = socketserver.ForkingMixIn +else: + + class ForkingMixIn: # type: ignore + pass + + +try: + af_unix = socket.AF_UNIX +except AttributeError: + af_unix = None # type: ignore + +LISTEN_QUEUE = 128 +can_open_by_fd = not platform.system() == "Windows" and hasattr(socket, "fromfd") + +_TSSLContextArg = t.Optional[ + t.Union["ssl.SSLContext", t.Tuple[str, t.Optional[str]], "te.Literal['adhoc']"] +] + +if t.TYPE_CHECKING: + import typing_extensions as te # noqa: F401 + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + from cryptography.hazmat.primitives.asymmetric.rsa import ( + RSAPrivateKeyWithSerialization, + ) + from cryptography.x509 import Certificate + + +class DechunkedInput(io.RawIOBase): + """An input stream that handles Transfer-Encoding 'chunked'""" + + def __init__(self, rfile: t.IO[bytes]) -> None: + self._rfile = rfile + self._done = False + self._len = 0 + + def readable(self) -> bool: + return True + + def read_chunk_len(self) -> int: + try: + line = self._rfile.readline().decode("latin1") + _len = int(line.strip(), 16) + except ValueError as e: + raise OSError("Invalid chunk header") from e + if _len < 0: + raise OSError("Negative chunk length not allowed") + return _len + + def readinto(self, buf: bytearray) -> int: # type: ignore + read = 0 + while not self._done and read < len(buf): + if self._len == 0: + # This is the first chunk or we fully consumed the previous + # one. Read the next length of the next chunk + self._len = self.read_chunk_len() + + if self._len == 0: + # Found the final chunk of size 0. The stream is now exhausted, + # but there is still a final newline that should be consumed + self._done = True + + if self._len > 0: + # There is data (left) in this chunk, so append it to the + # buffer. If this operation fully consumes the chunk, this will + # reset self._len to 0. + n = min(len(buf), self._len) + + # If (read + chunk size) becomes more than len(buf), buf will + # grow beyond the original size and read more data than + # required. So only read as much data as can fit in buf. + if read + n > len(buf): + buf[read:] = self._rfile.read(len(buf) - read) + self._len -= len(buf) - read + read = len(buf) + else: + buf[read : read + n] = self._rfile.read(n) + self._len -= n + read += n + + if self._len == 0: + # Skip the terminating newline of a chunk that has been fully + # consumed. 
This also applies to the 0-sized final chunk + terminator = self._rfile.readline() + if terminator not in (b"\n", b"\r\n", b"\r"): + raise OSError("Missing chunk terminating newline") + + return read + + +class WSGIRequestHandler(BaseHTTPRequestHandler): + """A request handler that implements WSGI dispatching.""" + + server: "BaseWSGIServer" + + @property + def server_version(self) -> str: # type: ignore + from . import __version__ + + return f"Werkzeug/{__version__}" + + def make_environ(self) -> "WSGIEnvironment": + request_url = url_parse(self.path) + + def shutdown_server() -> None: + warnings.warn( + "The 'environ['werkzeug.server.shutdown']' function is" + " deprecated and will be removed in Werkzeug 2.1.", + stacklevel=2, + ) + self.server.shutdown_signal = True + + url_scheme = "http" if self.server.ssl_context is None else "https" + + if not self.client_address: + self.client_address = ("", 0) + elif isinstance(self.client_address, str): + self.client_address = (self.client_address, 0) + + # If there was no scheme but the path started with two slashes, + # the first segment may have been incorrectly parsed as the + # netloc, prepend it to the path again. + if not request_url.scheme and request_url.netloc: + path_info = f"/{request_url.netloc}{request_url.path}" + else: + path_info = request_url.path + + path_info = url_unquote(path_info) + + environ: "WSGIEnvironment" = { + "wsgi.version": (1, 0), + "wsgi.url_scheme": url_scheme, + "wsgi.input": self.rfile, + "wsgi.errors": sys.stderr, + "wsgi.multithread": self.server.multithread, + "wsgi.multiprocess": self.server.multiprocess, + "wsgi.run_once": False, + "werkzeug.server.shutdown": shutdown_server, + "werkzeug.socket": self.connection, + "SERVER_SOFTWARE": self.server_version, + "REQUEST_METHOD": self.command, + "SCRIPT_NAME": "", + "PATH_INFO": _wsgi_encoding_dance(path_info), + "QUERY_STRING": _wsgi_encoding_dance(request_url.query), + # Non-standard, added by mod_wsgi, uWSGI + "REQUEST_URI": _wsgi_encoding_dance(self.path), + # Non-standard, added by gunicorn + "RAW_URI": _wsgi_encoding_dance(self.path), + "REMOTE_ADDR": self.address_string(), + "REMOTE_PORT": self.port_integer(), + "SERVER_NAME": self.server.server_address[0], + "SERVER_PORT": str(self.server.server_address[1]), + "SERVER_PROTOCOL": self.request_version, + } + + for key, value in self.headers.items(): + key = key.upper().replace("-", "_") + value = value.replace("\r\n", "") + if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): + key = f"HTTP_{key}" + if key in environ: + value = f"{environ[key]},{value}" + environ[key] = value + + if environ.get("HTTP_TRANSFER_ENCODING", "").strip().lower() == "chunked": + environ["wsgi.input_terminated"] = True + environ["wsgi.input"] = DechunkedInput(environ["wsgi.input"]) + + # Per RFC 2616, if the URL is absolute, use that as the host. + # We're using "has a scheme" to indicate an absolute URL. + if request_url.scheme and request_url.netloc: + environ["HTTP_HOST"] = request_url.netloc + + try: + # binary_form=False gives nicer information, but wouldn't be compatible with + # what Nginx or Apache could return. + peer_cert = self.connection.getpeercert(binary_form=True) + if peer_cert is not None: + # Nginx and Apache use PEM format. + environ["SSL_CLIENT_CERT"] = ssl.DER_cert_to_PEM_cert(peer_cert) + except ValueError: + # SSL handshake hasn't finished. + self.server.log("error", "Cannot fetch SSL peer certificate info") + except AttributeError: + # Not using TLS, the socket will not have getpeercert(). 
+ pass + + return environ + + def run_wsgi(self) -> None: + if self.headers.get("Expect", "").lower().strip() == "100-continue": + self.wfile.write(b"HTTP/1.1 100 Continue\r\n\r\n") + + self.environ = environ = self.make_environ() + status_set: t.Optional[str] = None + headers_set: t.Optional[t.List[t.Tuple[str, str]]] = None + status_sent: t.Optional[str] = None + headers_sent: t.Optional[t.List[t.Tuple[str, str]]] = None + + def write(data: bytes) -> None: + nonlocal status_sent, headers_sent + assert status_set is not None, "write() before start_response" + assert headers_set is not None, "write() before start_response" + if status_sent is None: + status_sent = status_set + headers_sent = headers_set + try: + code_str, msg = status_sent.split(None, 1) + except ValueError: + code_str, msg = status_sent, "" + code = int(code_str) + self.send_response(code, msg) + header_keys = set() + for key, value in headers_sent: + self.send_header(key, value) + key = key.lower() + header_keys.add(key) + if not ( + "content-length" in header_keys + or environ["REQUEST_METHOD"] == "HEAD" + or code < 200 + or code in (204, 304) + ): + self.close_connection = True + self.send_header("Connection", "close") + if "server" not in header_keys: + self.send_header("Server", self.version_string()) + if "date" not in header_keys: + self.send_header("Date", self.date_time_string()) + self.end_headers() + + assert isinstance(data, bytes), "applications must write bytes" + self.wfile.write(data) + self.wfile.flush() + + def start_response(status, headers, exc_info=None): # type: ignore + nonlocal status_set, headers_set + if exc_info: + try: + if headers_sent: + raise exc_info[1].with_traceback(exc_info[2]) + finally: + exc_info = None + elif headers_set: + raise AssertionError("Headers already set") + status_set = status + headers_set = headers + return write + + def execute(app: "WSGIApplication") -> None: + application_iter = app(environ, start_response) + try: + for data in application_iter: + write(data) + if not headers_sent: + write(b"") + finally: + if hasattr(application_iter, "close"): + application_iter.close() # type: ignore + + try: + execute(self.server.app) + except (ConnectionError, socket.timeout) as e: + self.connection_dropped(e, environ) + except Exception: + if self.server.passthrough_errors: + raise + from .debug.tbtools import get_current_traceback + + traceback = get_current_traceback(ignore_system_exceptions=True) + try: + # if we haven't yet sent the headers but they are set + # we roll back to be able to set them again. + if status_sent is None: + status_set = None + headers_set = None + execute(InternalServerError()) + except Exception: + pass + self.server.log("error", "Error on request:\n%s", traceback.plaintext) + + def handle(self) -> None: + """Handles a request ignoring dropped connections.""" + try: + BaseHTTPRequestHandler.handle(self) + except (ConnectionError, socket.timeout) as e: + self.connection_dropped(e) + except Exception as e: + if self.server.ssl_context is not None and is_ssl_error(e): + self.log_error("SSL error occurred: %s", e) + else: + raise + if self.server.shutdown_signal: + self.initiate_shutdown() + + def initiate_shutdown(self) -> None: + if is_running_from_reloader(): + # Windows does not provide SIGKILL, go with SIGTERM then. 
+ sig = getattr(signal, "SIGKILL", signal.SIGTERM) + os.kill(os.getpid(), sig) + + self.server._BaseServer__shutdown_request = True # type: ignore + + def connection_dropped( + self, error: BaseException, environ: t.Optional["WSGIEnvironment"] = None + ) -> None: + """Called if the connection was closed by the client. By default + nothing happens. + """ + + def handle_one_request(self) -> None: + """Handle a single HTTP request.""" + self.raw_requestline = self.rfile.readline() + if not self.raw_requestline: + self.close_connection = True + elif self.parse_request(): + self.run_wsgi() + + def send_response(self, code: int, message: t.Optional[str] = None) -> None: + """Send the response header and log the response code.""" + self.log_request(code) + if message is None: + message = self.responses[code][0] if code in self.responses else "" + if self.request_version != "HTTP/0.9": + hdr = f"{self.protocol_version} {code} {message}\r\n" + self.wfile.write(hdr.encode("ascii")) + + def version_string(self) -> str: + return super().version_string().strip() + + def address_string(self) -> str: + if getattr(self, "environ", None): + return self.environ["REMOTE_ADDR"] # type: ignore + + if not self.client_address: + return "" + + return self.client_address[0] + + def port_integer(self) -> int: + return self.client_address[1] + + def log_request( + self, code: t.Union[int, str] = "-", size: t.Union[int, str] = "-" + ) -> None: + try: + path = uri_to_iri(self.path) + msg = f"{self.command} {path} {self.request_version}" + except AttributeError: + # path isn't set if the requestline was bad + msg = self.requestline + + code = str(code) + + if _log_add_style: + if code[0] == "1": # 1xx - Informational + msg = _ansi_style(msg, "bold") + elif code == "200": # 2xx - Success + pass + elif code == "304": # 304 - Resource Not Modified + msg = _ansi_style(msg, "cyan") + elif code[0] == "3": # 3xx - Redirection + msg = _ansi_style(msg, "green") + elif code == "404": # 404 - Resource Not Found + msg = _ansi_style(msg, "yellow") + elif code[0] == "4": # 4xx - Client Error + msg = _ansi_style(msg, "bold", "red") + else: # 5xx, or any other response + msg = _ansi_style(msg, "bold", "magenta") + + self.log("info", '"%s" %s %s', msg, code, size) + + def log_error(self, format: str, *args: t.Any) -> None: + self.log("error", format, *args) + + def log_message(self, format: str, *args: t.Any) -> None: + self.log("info", format, *args) + + def log(self, type: str, message: str, *args: t.Any) -> None: + _log( + type, + f"{self.address_string()} - - [{self.log_date_time_string()}] {message}\n", + *args, + ) + + +def _ansi_style(value: str, *styles: str) -> str: + codes = { + "bold": 1, + "red": 31, + "green": 32, + "yellow": 33, + "magenta": 35, + "cyan": 36, + } + + for style in styles: + value = f"\x1b[{codes[style]}m{value}" + + return f"{value}\x1b[0m" + + +def generate_adhoc_ssl_pair( + cn: t.Optional[str] = None, +) -> t.Tuple["Certificate", "RSAPrivateKeyWithSerialization"]: + try: + from cryptography import x509 + from cryptography.x509.oid import NameOID + from cryptography.hazmat.backends import default_backend + from cryptography.hazmat.primitives import hashes + from cryptography.hazmat.primitives.asymmetric import rsa + except ImportError: + raise TypeError( + "Using ad-hoc certificates requires the cryptography library." 
+ ) from None + + backend = default_backend() + pkey = rsa.generate_private_key( + public_exponent=65537, key_size=2048, backend=backend + ) + + # pretty damn sure that this is not actually accepted by anyone + if cn is None: + cn = "*" + + subject = x509.Name( + [ + x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Dummy Certificate"), + x509.NameAttribute(NameOID.COMMON_NAME, cn), + ] + ) + + backend = default_backend() + cert = ( + x509.CertificateBuilder() + .subject_name(subject) + .issuer_name(subject) + .public_key(pkey.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(dt.now(timezone.utc)) + .not_valid_after(dt.now(timezone.utc) + timedelta(days=365)) + .add_extension(x509.ExtendedKeyUsage([x509.OID_SERVER_AUTH]), critical=False) + .add_extension(x509.SubjectAlternativeName([x509.DNSName(cn)]), critical=False) + .sign(pkey, hashes.SHA256(), backend) + ) + return cert, pkey + + +def make_ssl_devcert( + base_path: str, host: t.Optional[str] = None, cn: t.Optional[str] = None +) -> t.Tuple[str, str]: + """Creates an SSL key for development. This should be used instead of + the ``'adhoc'`` key which generates a new cert on each server start. + It accepts a path for where it should store the key and cert and + either a host or CN. If a host is given it will use the CN + ``*.host/CN=host``. + + For more information see :func:`run_simple`. + + .. versionadded:: 0.9 + + :param base_path: the path to the certificate and key. The extension + ``.crt`` is added for the certificate, ``.key`` is + added for the key. + :param host: the name of the host. This can be used as an alternative + for the `cn`. + :param cn: the `CN` to use. + """ + + if host is not None: + cn = f"*.{host}/CN={host}" + cert, pkey = generate_adhoc_ssl_pair(cn=cn) + + from cryptography.hazmat.primitives import serialization + + cert_file = f"{base_path}.crt" + pkey_file = f"{base_path}.key" + + with open(cert_file, "wb") as f: + f.write(cert.public_bytes(serialization.Encoding.PEM)) + with open(pkey_file, "wb") as f: + f.write( + pkey.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) + ) + + return cert_file, pkey_file + + +def generate_adhoc_ssl_context() -> "ssl.SSLContext": + """Generates an adhoc SSL context for the development server.""" + import tempfile + import atexit + + cert, pkey = generate_adhoc_ssl_pair() + + from cryptography.hazmat.primitives import serialization + + cert_handle, cert_file = tempfile.mkstemp() + pkey_handle, pkey_file = tempfile.mkstemp() + atexit.register(os.remove, pkey_file) + atexit.register(os.remove, cert_file) + + os.write(cert_handle, cert.public_bytes(serialization.Encoding.PEM)) + os.write( + pkey_handle, + pkey.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ), + ) + + os.close(cert_handle) + os.close(pkey_handle) + ctx = load_ssl_context(cert_file, pkey_file) + return ctx + + +def load_ssl_context( + cert_file: str, pkey_file: t.Optional[str] = None, protocol: t.Optional[int] = None +) -> "ssl.SSLContext": + """Loads SSL context from cert/private key files and optional protocol. + Many parameters are directly taken from the API of + :py:class:`ssl.SSLContext`. + + :param cert_file: Path of the certificate to use. + :param pkey_file: Path of the private key to use. 
If not given, the key + will be obtained from the certificate file. + :param protocol: A ``PROTOCOL`` constant from the :mod:`ssl` module. + Defaults to :data:`ssl.PROTOCOL_TLS_SERVER`. + """ + if protocol is None: + protocol = ssl.PROTOCOL_TLS_SERVER + + ctx = ssl.SSLContext(protocol) + ctx.load_cert_chain(cert_file, pkey_file) + return ctx + + +def is_ssl_error(error: t.Optional[Exception] = None) -> bool: + """Checks if the given error (or the current one) is an SSL error.""" + if error is None: + error = t.cast(Exception, sys.exc_info()[1]) + return isinstance(error, ssl.SSLError) + + +def select_address_family(host: str, port: int) -> socket.AddressFamily: + """Return ``AF_INET4``, ``AF_INET6``, or ``AF_UNIX`` depending on + the host and port.""" + if host.startswith("unix://"): + return socket.AF_UNIX + elif ":" in host and hasattr(socket, "AF_INET6"): + return socket.AF_INET6 + return socket.AF_INET + + +def get_sockaddr( + host: str, port: int, family: socket.AddressFamily +) -> t.Union[t.Tuple[str, int], str]: + """Return a fully qualified socket address that can be passed to + :func:`socket.bind`.""" + if family == af_unix: + return host.split("://", 1)[1] + try: + res = socket.getaddrinfo( + host, port, family, socket.SOCK_STREAM, socket.IPPROTO_TCP + ) + except socket.gaierror: + return host, port + return res[0][4] # type: ignore + + +def get_interface_ip(family: socket.AddressFamily) -> str: + """Get the IP address of an external interface. Used when binding to + 0.0.0.0 or ::1 to show a more useful URL. + + :meta private: + """ + # arbitrary private address + host = "fd31:f903:5ab5:1::1" if family == socket.AF_INET6 else "10.253.155.219" + + with socket.socket(family, socket.SOCK_DGRAM) as s: + try: + s.connect((host, 58162)) + except OSError: + return "::1" if family == socket.AF_INET6 else "127.0.0.1" + + return s.getsockname()[0] # type: ignore + + +class BaseWSGIServer(HTTPServer): + + """Simple single-threaded, single-process WSGI server.""" + + multithread = False + multiprocess = False + request_queue_size = LISTEN_QUEUE + + def __init__( + self, + host: str, + port: int, + app: "WSGIApplication", + handler: t.Optional[t.Type[WSGIRequestHandler]] = None, + passthrough_errors: bool = False, + ssl_context: t.Optional[_TSSLContextArg] = None, + fd: t.Optional[int] = None, + ) -> None: + if handler is None: + handler = WSGIRequestHandler + + self.address_family = select_address_family(host, port) + + if fd is not None: + real_sock = socket.fromfd(fd, self.address_family, socket.SOCK_STREAM) + port = 0 + + server_address = get_sockaddr(host, int(port), self.address_family) + + # remove socket file if it already exists + if self.address_family == af_unix: + server_address = t.cast(str, server_address) + + if os.path.exists(server_address): + os.unlink(server_address) + + super().__init__(server_address, handler) # type: ignore + + self.app = app + self.passthrough_errors = passthrough_errors + self.shutdown_signal = False + self.host = host + self.port = self.socket.getsockname()[1] + + # Patch in the original socket. 
+ if fd is not None: + self.socket.close() + self.socket = real_sock + self.server_address = self.socket.getsockname() + + if ssl_context is not None: + if isinstance(ssl_context, tuple): + ssl_context = load_ssl_context(*ssl_context) + if ssl_context == "adhoc": + ssl_context = generate_adhoc_ssl_context() + + self.socket = ssl_context.wrap_socket(self.socket, server_side=True) + self.ssl_context: t.Optional["ssl.SSLContext"] = ssl_context + else: + self.ssl_context = None + + def log(self, type: str, message: str, *args: t.Any) -> None: + _log(type, message, *args) + + def serve_forever(self, poll_interval: float = 0.5) -> None: + self.shutdown_signal = False + try: + super().serve_forever(poll_interval=poll_interval) + except KeyboardInterrupt: + pass + finally: + self.server_close() + + def handle_error(self, request: t.Any, client_address: t.Tuple[str, int]) -> None: + if self.passthrough_errors: + raise + + return super().handle_error(request, client_address) + + +class ThreadedWSGIServer(socketserver.ThreadingMixIn, BaseWSGIServer): + + """A WSGI server that does threading.""" + + multithread = True + daemon_threads = True + + +class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer): + + """A WSGI server that does forking.""" + + multiprocess = True + + def __init__( + self, + host: str, + port: int, + app: "WSGIApplication", + processes: int = 40, + handler: t.Optional[t.Type[WSGIRequestHandler]] = None, + passthrough_errors: bool = False, + ssl_context: t.Optional[_TSSLContextArg] = None, + fd: t.Optional[int] = None, + ) -> None: + if not can_fork: + raise ValueError("Your platform does not support forking.") + BaseWSGIServer.__init__( + self, host, port, app, handler, passthrough_errors, ssl_context, fd + ) + self.max_children = processes + + +def make_server( + host: str, + port: int, + app: "WSGIApplication", + threaded: bool = False, + processes: int = 1, + request_handler: t.Optional[t.Type[WSGIRequestHandler]] = None, + passthrough_errors: bool = False, + ssl_context: t.Optional[_TSSLContextArg] = None, + fd: t.Optional[int] = None, +) -> BaseWSGIServer: + """Create a new server instance that is either threaded, or forks + or just processes one request after another. + """ + if threaded and processes > 1: + raise ValueError("cannot have a multithreaded and multi process server.") + elif threaded: + return ThreadedWSGIServer( + host, port, app, request_handler, passthrough_errors, ssl_context, fd=fd + ) + elif processes > 1: + return ForkingWSGIServer( + host, + port, + app, + processes, + request_handler, + passthrough_errors, + ssl_context, + fd=fd, + ) + else: + return BaseWSGIServer( + host, port, app, request_handler, passthrough_errors, ssl_context, fd=fd + ) + + +def is_running_from_reloader() -> bool: + """Checks if the application is running from within the Werkzeug + reloader subprocess. + + .. 
versionadded:: 0.10 + """ + return os.environ.get("WERKZEUG_RUN_MAIN") == "true" + + +def run_simple( + hostname: str, + port: int, + application: "WSGIApplication", + use_reloader: bool = False, + use_debugger: bool = False, + use_evalex: bool = True, + extra_files: t.Optional[t.Iterable[str]] = None, + exclude_patterns: t.Optional[t.Iterable[str]] = None, + reloader_interval: int = 1, + reloader_type: str = "auto", + threaded: bool = False, + processes: int = 1, + request_handler: t.Optional[t.Type[WSGIRequestHandler]] = None, + static_files: t.Optional[t.Dict[str, t.Union[str, t.Tuple[str, str]]]] = None, + passthrough_errors: bool = False, + ssl_context: t.Optional[_TSSLContextArg] = None, +) -> None: + """Start a WSGI application. Optional features include a reloader, + multithreading and fork support. + + This function has a command-line interface too:: + + python -m werkzeug.serving --help + + .. versionchanged:: 2.0 + Added ``exclude_patterns`` parameter. + + .. versionadded:: 0.5 + `static_files` was added to simplify serving of static files as well + as `passthrough_errors`. + + .. versionadded:: 0.6 + support for SSL was added. + + .. versionadded:: 0.8 + Added support for automatically loading a SSL context from certificate + file and private key. + + .. versionadded:: 0.9 + Added command-line interface. + + .. versionadded:: 0.10 + Improved the reloader and added support for changing the backend + through the `reloader_type` parameter. See :ref:`reloader` + for more information. + + .. versionchanged:: 0.15 + Bind to a Unix socket by passing a path that starts with + ``unix://`` as the ``hostname``. + + :param hostname: The host to bind to, for example ``'localhost'``. + If the value is a path that starts with ``unix://`` it will bind + to a Unix socket instead of a TCP socket.. + :param port: The port for the server. eg: ``8080`` + :param application: the WSGI application to execute + :param use_reloader: should the server automatically restart the python + process if modules were changed? + :param use_debugger: should the werkzeug debugging system be used? + :param use_evalex: should the exception evaluation feature be enabled? + :param extra_files: a list of files the reloader should watch + additionally to the modules. For example configuration + files. + :param exclude_patterns: List of :mod:`fnmatch` patterns to ignore + when running the reloader. For example, ignore cache files that + shouldn't reload when updated. + :param reloader_interval: the interval for the reloader in seconds. + :param reloader_type: the type of reloader to use. The default is + auto detection. Valid values are ``'stat'`` and + ``'watchdog'``. See :ref:`reloader` for more + information. + :param threaded: should the process handle each request in a separate + thread? + :param processes: if greater than 1 then handle each request in a new process + up to this maximum number of concurrent processes. + :param request_handler: optional parameter that can be used to replace + the default one. You can use this to replace it + with a different + :class:`~BaseHTTPServer.BaseHTTPRequestHandler` + subclass. + :param static_files: a list or dict of paths for static files. This works + exactly like :class:`SharedDataMiddleware`, it's actually + just wrapping the application in that middleware before + serving. + :param passthrough_errors: set this to `True` to disable the error catching. + This means that the server will die on errors but + it can be useful to hook debuggers in (pdb etc.) 
+ :param ssl_context: an SSL context for the connection. Either an + :class:`ssl.SSLContext`, a tuple in the form + ``(cert_file, pkey_file)``, the string ``'adhoc'`` if + the server should automatically create one, or ``None`` + to disable SSL (which is the default). + """ + if not isinstance(port, int): + raise TypeError("port must be an integer") + if use_debugger: + from .debug import DebuggedApplication + + application = DebuggedApplication(application, use_evalex) + if static_files: + from .middleware.shared_data import SharedDataMiddleware + + application = SharedDataMiddleware(application, static_files) + + def log_startup(sock: socket.socket) -> None: + all_addresses_message = ( + " * Running on all addresses.\n" + " WARNING: This is a development server. Do not use it in" + " a production deployment." + ) + + if sock.family == af_unix: + _log("info", " * Running on %s (Press CTRL+C to quit)", hostname) + else: + if hostname == "0.0.0.0": + _log("warning", all_addresses_message) + display_hostname = get_interface_ip(socket.AF_INET) + elif hostname == "::": + _log("warning", all_addresses_message) + display_hostname = get_interface_ip(socket.AF_INET6) + else: + display_hostname = hostname + + if ":" in display_hostname: + display_hostname = f"[{display_hostname}]" + + _log( + "info", + " * Running on %s://%s:%d/ (Press CTRL+C to quit)", + "http" if ssl_context is None else "https", + display_hostname, + sock.getsockname()[1], + ) + + def inner() -> None: + try: + fd: t.Optional[int] = int(os.environ["WERKZEUG_SERVER_FD"]) + except (LookupError, ValueError): + fd = None + srv = make_server( + hostname, + port, + application, + threaded, + processes, + request_handler, + passthrough_errors, + ssl_context, + fd=fd, + ) + if fd is None: + log_startup(srv.socket) + srv.serve_forever() + + if use_reloader: + # If we're not running already in the subprocess that is the + # reloader we want to open up a socket early to make sure the + # port is actually available. + if not is_running_from_reloader(): + if port == 0 and not can_open_by_fd: + raise ValueError( + "Cannot bind to a random port with enabled " + "reloader if the Python interpreter does " + "not support socket opening by fd." + ) + + # Create and destroy a socket so that any exceptions are + # raised before we spawn a separate Python interpreter and + # lose this ability. + address_family = select_address_family(hostname, port) + server_address = get_sockaddr(hostname, port, address_family) + s = socket.socket(address_family, socket.SOCK_STREAM) + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + s.bind(server_address) + s.set_inheritable(True) + + # If we can open the socket by file descriptor, then we can just + # reuse this one and our socket will survive the restarts. + if can_open_by_fd: + os.environ["WERKZEUG_SERVER_FD"] = str(s.fileno()) + s.listen(LISTEN_QUEUE) + log_startup(s) + else: + s.close() + if address_family == af_unix: + server_address = t.cast(str, server_address) + _log("info", "Unlinking %s", server_address) + os.unlink(server_address) + + from ._reloader import run_with_reloader as _rwr + + _rwr( + inner, + extra_files=extra_files, + exclude_patterns=exclude_patterns, + interval=reloader_interval, + reloader_type=reloader_type, + ) + else: + inner() + + +def run_with_reloader(*args: t.Any, **kwargs: t.Any) -> None: + """Run a process with the reloader. This is not a public API, do + not use this function. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. 
+ """ + from ._reloader import run_with_reloader as _rwr + + warnings.warn( + ( + "'run_with_reloader' is a private API, it will no longer be" + " accessible in Werkzeug 2.1. Use 'run_simple' instead." + ), + DeprecationWarning, + stacklevel=2, + ) + _rwr(*args, **kwargs) + + +def main() -> None: + """A simple command-line interface for :py:func:`run_simple`.""" + import argparse + from .utils import import_string + + _log("warning", "This CLI is deprecated and will be removed in version 2.1.") + + parser = argparse.ArgumentParser( + description="Run the given WSGI application with the development server.", + allow_abbrev=False, + ) + parser.add_argument( + "-b", + "--bind", + dest="address", + help="The hostname:port the app should listen on.", + ) + parser.add_argument( + "-d", + "--debug", + action="store_true", + help="Show the interactive debugger for unhandled exceptions.", + ) + parser.add_argument( + "-r", + "--reload", + action="store_true", + help="Reload the process if modules change.", + ) + parser.add_argument( + "application", help="Application to import and serve, in the form module:app." + ) + args = parser.parse_args() + hostname, port = None, None + + if args.address: + hostname, _, port = args.address.partition(":") + + run_simple( + hostname=hostname or "127.0.0.1", + port=int(port or 5000), + application=import_string(args.application), + use_reloader=args.reload, + use_debugger=args.debug, + ) + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.8/site-packages/werkzeug/test.py b/venv/lib/python3.8/site-packages/werkzeug/test.py new file mode 100644 index 0000000..09cb7e8 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/test.py @@ -0,0 +1,1326 @@ +import mimetypes +import sys +import typing as t +import warnings +from collections import defaultdict +from datetime import datetime +from datetime import timedelta +from http.cookiejar import CookieJar +from io import BytesIO +from itertools import chain +from random import random +from tempfile import TemporaryFile +from time import time +from urllib.request import Request as _UrllibRequest + +from ._internal import _get_environ +from ._internal import _make_encode_wrapper +from ._internal import _wsgi_decoding_dance +from ._internal import _wsgi_encoding_dance +from .datastructures import Authorization +from .datastructures import CallbackDict +from .datastructures import CombinedMultiDict +from .datastructures import EnvironHeaders +from .datastructures import FileMultiDict +from .datastructures import Headers +from .datastructures import MultiDict +from .http import dump_cookie +from .http import dump_options_header +from .http import parse_options_header +from .sansio.multipart import Data +from .sansio.multipart import Epilogue +from .sansio.multipart import Field +from .sansio.multipart import File +from .sansio.multipart import MultipartEncoder +from .sansio.multipart import Preamble +from .urls import iri_to_uri +from .urls import url_encode +from .urls import url_fix +from .urls import url_parse +from .urls import url_unparse +from .urls import url_unquote +from .utils import get_content_type +from .wrappers.request import Request +from .wrappers.response import Response +from .wsgi import ClosingIterator +from .wsgi import get_current_url + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +def stream_encode_multipart( + data: t.Mapping[str, t.Any], + use_tempfile: bool = True, + threshold: int = 1024 * 500, + 
boundary: t.Optional[str] = None, + charset: str = "utf-8", +) -> t.Tuple[t.IO[bytes], int, str]: + """Encode a dict of values (either strings or file descriptors or + :class:`FileStorage` objects.) into a multipart encoded string stored + in a file descriptor. + """ + if boundary is None: + boundary = f"---------------WerkzeugFormPart_{time()}{random()}" + + stream: t.IO[bytes] = BytesIO() + total_length = 0 + on_disk = False + + if use_tempfile: + + def write_binary(s: bytes) -> int: + nonlocal stream, total_length, on_disk + + if on_disk: + return stream.write(s) + else: + length = len(s) + + if length + total_length <= threshold: + stream.write(s) + else: + new_stream = t.cast(t.IO[bytes], TemporaryFile("wb+")) + new_stream.write(stream.getvalue()) # type: ignore + new_stream.write(s) + stream = new_stream + on_disk = True + + total_length += length + return length + + else: + write_binary = stream.write + + encoder = MultipartEncoder(boundary.encode()) + write_binary(encoder.send_event(Preamble(data=b""))) + for key, value in _iter_data(data): + reader = getattr(value, "read", None) + if reader is not None: + filename = getattr(value, "filename", getattr(value, "name", None)) + content_type = getattr(value, "content_type", None) + if content_type is None: + content_type = ( + filename + and mimetypes.guess_type(filename)[0] + or "application/octet-stream" + ) + headers = Headers([("Content-Type", content_type)]) + if filename is None: + write_binary(encoder.send_event(Field(name=key, headers=headers))) + else: + write_binary( + encoder.send_event( + File(name=key, filename=filename, headers=headers) + ) + ) + while True: + chunk = reader(16384) + + if not chunk: + break + + write_binary(encoder.send_event(Data(data=chunk, more_data=True))) + else: + if not isinstance(value, str): + value = str(value) + write_binary(encoder.send_event(Field(name=key, headers=Headers()))) + write_binary( + encoder.send_event(Data(data=value.encode(charset), more_data=False)) + ) + + write_binary(encoder.send_event(Epilogue(data=b""))) + + length = stream.tell() + stream.seek(0) + return stream, length, boundary + + +def encode_multipart( + values: t.Mapping[str, t.Any], + boundary: t.Optional[str] = None, + charset: str = "utf-8", +) -> t.Tuple[str, bytes]: + """Like `stream_encode_multipart` but returns a tuple in the form + (``boundary``, ``data``) where data is bytes. + """ + stream, length, boundary = stream_encode_multipart( + values, use_tempfile=False, boundary=boundary, charset=charset + ) + return boundary, stream.read() + + +class _TestCookieHeaders: + """A headers adapter for cookielib""" + + def __init__(self, headers: t.Union[Headers, t.List[t.Tuple[str, str]]]) -> None: + self.headers = headers + + def getheaders(self, name: str) -> t.Iterable[str]: + headers = [] + name = name.lower() + for k, v in self.headers: + if k.lower() == name: + headers.append(v) + return headers + + def get_all( + self, name: str, default: t.Optional[t.Iterable[str]] = None + ) -> t.Iterable[str]: + headers = self.getheaders(name) + + if not headers: + return default # type: ignore + + return headers + + +class _TestCookieResponse: + """Something that looks like a httplib.HTTPResponse, but is actually just an + adapter for our test responses to make them available for cookielib. 
+ """ + + def __init__(self, headers: t.Union[Headers, t.List[t.Tuple[str, str]]]) -> None: + self.headers = _TestCookieHeaders(headers) + + def info(self) -> _TestCookieHeaders: + return self.headers + + +class _TestCookieJar(CookieJar): + """A cookielib.CookieJar modified to inject and read cookie headers from + and to wsgi environments, and wsgi application responses. + """ + + def inject_wsgi(self, environ: "WSGIEnvironment") -> None: + """Inject the cookies as client headers into the server's wsgi + environment. + """ + cvals = [f"{c.name}={c.value}" for c in self] + + if cvals: + environ["HTTP_COOKIE"] = "; ".join(cvals) + else: + environ.pop("HTTP_COOKIE", None) + + def extract_wsgi( + self, + environ: "WSGIEnvironment", + headers: t.Union[Headers, t.List[t.Tuple[str, str]]], + ) -> None: + """Extract the server's set-cookie headers as cookies into the + cookie jar. + """ + self.extract_cookies( + _TestCookieResponse(headers), # type: ignore + _UrllibRequest(get_current_url(environ)), + ) + + +def _iter_data(data: t.Mapping[str, t.Any]) -> t.Iterator[t.Tuple[str, t.Any]]: + """Iterate over a mapping that might have a list of values, yielding + all key, value pairs. Almost like iter_multi_items but only allows + lists, not tuples, of values so tuples can be used for files. + """ + if isinstance(data, MultiDict): + yield from data.items(multi=True) + else: + for key, value in data.items(): + if isinstance(value, list): + for v in value: + yield key, v + else: + yield key, value + + +_TAnyMultiDict = t.TypeVar("_TAnyMultiDict", bound=MultiDict) + + +class EnvironBuilder: + """This class can be used to conveniently create a WSGI environment + for testing purposes. It can be used to quickly create WSGI environments + or request objects from arbitrary data. + + The signature of this class is also used in some other places as of + Werkzeug 0.5 (:func:`create_environ`, :meth:`Response.from_values`, + :meth:`Client.open`). Because of this most of the functionality is + available through the constructor alone. + + Files and regular form data can be manipulated independently of each + other with the :attr:`form` and :attr:`files` attributes, but are + passed with the same argument to the constructor: `data`. + + `data` can be any of these values: + + - a `str` or `bytes` object: The object is converted into an + :attr:`input_stream`, the :attr:`content_length` is set and you have to + provide a :attr:`content_type`. + - a `dict` or :class:`MultiDict`: The keys have to be strings. The values + have to be either any of the following objects, or a list of any of the + following objects: + + - a :class:`file`-like object: These are converted into + :class:`FileStorage` objects automatically. + - a `tuple`: The :meth:`~FileMultiDict.add_file` method is called + with the key and the unpacked `tuple` items as positional + arguments. + - a `str`: The string is set as form data for the associated key. + - a file-like object: The object content is loaded in memory and then + handled like a regular `str` or a `bytes`. + + :param path: the path of the request. In the WSGI environment this will + end up as `PATH_INFO`. If the `query_string` is not defined + and there is a question mark in the `path` everything after + it is used as query string. + :param base_url: the base URL is a URL that is used to extract the WSGI + URL scheme, host (server name + server port) and the + script root (`SCRIPT_NAME`). + :param query_string: an optional string or dict with URL parameters. 
+ :param method: the HTTP method to use, defaults to `GET`. + :param input_stream: an optional input stream. Do not specify this and + `data`. As soon as an input stream is set you can't + modify :attr:`args` and :attr:`files` unless you + set the :attr:`input_stream` to `None` again. + :param content_type: The content type for the request. As of 0.5 you + don't have to provide this when specifying files + and form data via `data`. + :param content_length: The content length for the request. You don't + have to specify this when providing data via + `data`. + :param errors_stream: an optional error stream that is used for + `wsgi.errors`. Defaults to :data:`stderr`. + :param multithread: controls `wsgi.multithread`. Defaults to `False`. + :param multiprocess: controls `wsgi.multiprocess`. Defaults to `False`. + :param run_once: controls `wsgi.run_once`. Defaults to `False`. + :param headers: an optional list or :class:`Headers` object of headers. + :param data: a string or dict of form data or a file-object. + See explanation above. + :param json: An object to be serialized and assigned to ``data``. + Defaults the content type to ``"application/json"``. + Serialized with the function assigned to :attr:`json_dumps`. + :param environ_base: an optional dict of environment defaults. + :param environ_overrides: an optional dict of environment overrides. + :param charset: the charset used to encode string data. + :param auth: An authorization object to use for the + ``Authorization`` header value. A ``(username, password)`` tuple + is a shortcut for ``Basic`` authorization. + + .. versionchanged:: 2.0 + ``REQUEST_URI`` and ``RAW_URI`` is the full raw URI including + the query string, not only the path. + + .. versionchanged:: 2.0 + The default :attr:`request_class` is ``Request`` instead of + ``BaseRequest``. + + .. versionadded:: 2.0 + Added the ``auth`` parameter. + + .. versionadded:: 0.15 + The ``json`` param and :meth:`json_dumps` method. + + .. versionadded:: 0.15 + The environ has keys ``REQUEST_URI`` and ``RAW_URI`` containing + the path before perecent-decoding. This is not part of the WSGI + PEP, but many WSGI servers include it. + + .. versionchanged:: 0.6 + ``path`` and ``base_url`` can now be unicode strings that are + encoded with :func:`iri_to_uri`. + """ + + #: the server protocol to use. defaults to HTTP/1.1 + server_protocol = "HTTP/1.1" + + #: the wsgi version to use. defaults to (1, 0) + wsgi_version = (1, 0) + + #: The default request class used by :meth:`get_request`. + request_class = Request + + import json + + #: The serialization function used when ``json`` is passed. 
+ json_dumps = staticmethod(json.dumps) + del json + + _args: t.Optional[MultiDict] + _query_string: t.Optional[str] + _input_stream: t.Optional[t.IO[bytes]] + _form: t.Optional[MultiDict] + _files: t.Optional[FileMultiDict] + + def __init__( + self, + path: str = "/", + base_url: t.Optional[str] = None, + query_string: t.Optional[t.Union[t.Mapping[str, str], str]] = None, + method: str = "GET", + input_stream: t.Optional[t.IO[bytes]] = None, + content_type: t.Optional[str] = None, + content_length: t.Optional[int] = None, + errors_stream: t.Optional[t.IO[str]] = None, + multithread: bool = False, + multiprocess: bool = False, + run_once: bool = False, + headers: t.Optional[t.Union[Headers, t.Iterable[t.Tuple[str, str]]]] = None, + data: t.Optional[ + t.Union[t.IO[bytes], str, bytes, t.Mapping[str, t.Any]] + ] = None, + environ_base: t.Optional[t.Mapping[str, t.Any]] = None, + environ_overrides: t.Optional[t.Mapping[str, t.Any]] = None, + charset: str = "utf-8", + mimetype: t.Optional[str] = None, + json: t.Optional[t.Mapping[str, t.Any]] = None, + auth: t.Optional[t.Union[Authorization, t.Tuple[str, str]]] = None, + ) -> None: + path_s = _make_encode_wrapper(path) + if query_string is not None and path_s("?") in path: + raise ValueError("Query string is defined in the path and as an argument") + request_uri = url_parse(path) + if query_string is None and path_s("?") in path: + query_string = request_uri.query + self.charset = charset + self.path = iri_to_uri(request_uri.path) + self.request_uri = path + if base_url is not None: + base_url = url_fix(iri_to_uri(base_url, charset), charset) + self.base_url = base_url # type: ignore + if isinstance(query_string, (bytes, str)): + self.query_string = query_string + else: + if query_string is None: + query_string = MultiDict() + elif not isinstance(query_string, MultiDict): + query_string = MultiDict(query_string) + self.args = query_string + self.method = method + if headers is None: + headers = Headers() + elif not isinstance(headers, Headers): + headers = Headers(headers) + self.headers = headers + if content_type is not None: + self.content_type = content_type + if errors_stream is None: + errors_stream = sys.stderr + self.errors_stream = errors_stream + self.multithread = multithread + self.multiprocess = multiprocess + self.run_once = run_once + self.environ_base = environ_base + self.environ_overrides = environ_overrides + self.input_stream = input_stream + self.content_length = content_length + self.closed = False + + if auth is not None: + if isinstance(auth, tuple): + auth = Authorization( + "basic", {"username": auth[0], "password": auth[1]} + ) + + self.headers.set("Authorization", auth.to_header()) + + if json is not None: + if data is not None: + raise TypeError("can't provide both json and data") + + data = self.json_dumps(json) + + if self.content_type is None: + self.content_type = "application/json" + + if data: + if input_stream is not None: + raise TypeError("can't provide input stream and data") + if hasattr(data, "read"): + data = data.read() # type: ignore + if isinstance(data, str): + data = data.encode(self.charset) + if isinstance(data, bytes): + self.input_stream = BytesIO(data) + if self.content_length is None: + self.content_length = len(data) + else: + for key, value in _iter_data(data): # type: ignore + if isinstance(value, (tuple, dict)) or hasattr(value, "read"): + self._add_file_from_data(key, value) + else: + self.form.setlistdefault(key).append(value) + + if mimetype is not None: + self.mimetype = mimetype + + 
@classmethod + def from_environ( + cls, environ: "WSGIEnvironment", **kwargs: t.Any + ) -> "EnvironBuilder": + """Turn an environ dict back into a builder. Any extra kwargs + override the args extracted from the environ. + + .. versionchanged:: 2.0 + Path and query values are passed through the WSGI decoding + dance to avoid double encoding. + + .. versionadded:: 0.15 + """ + headers = Headers(EnvironHeaders(environ)) + out = { + "path": _wsgi_decoding_dance(environ["PATH_INFO"]), + "base_url": cls._make_base_url( + environ["wsgi.url_scheme"], + headers.pop("Host"), + _wsgi_decoding_dance(environ["SCRIPT_NAME"]), + ), + "query_string": _wsgi_decoding_dance(environ["QUERY_STRING"]), + "method": environ["REQUEST_METHOD"], + "input_stream": environ["wsgi.input"], + "content_type": headers.pop("Content-Type", None), + "content_length": headers.pop("Content-Length", None), + "errors_stream": environ["wsgi.errors"], + "multithread": environ["wsgi.multithread"], + "multiprocess": environ["wsgi.multiprocess"], + "run_once": environ["wsgi.run_once"], + "headers": headers, + } + out.update(kwargs) + return cls(**out) + + def _add_file_from_data( + self, + key: str, + value: t.Union[ + t.IO[bytes], t.Tuple[t.IO[bytes], str], t.Tuple[t.IO[bytes], str, str] + ], + ) -> None: + """Called in the EnvironBuilder to add files from the data dict.""" + if isinstance(value, tuple): + self.files.add_file(key, *value) + else: + self.files.add_file(key, value) + + @staticmethod + def _make_base_url(scheme: str, host: str, script_root: str) -> str: + return url_unparse((scheme, host, script_root, "", "")).rstrip("/") + "/" + + @property + def base_url(self) -> str: + """The base URL is used to extract the URL scheme, host name, + port, and root path. + """ + return self._make_base_url(self.url_scheme, self.host, self.script_root) + + @base_url.setter + def base_url(self, value: t.Optional[str]) -> None: + if value is None: + scheme = "http" + netloc = "localhost" + script_root = "" + else: + scheme, netloc, script_root, qs, anchor = url_parse(value) + if qs or anchor: + raise ValueError("base url must not contain a query string or fragment") + self.script_root = script_root.rstrip("/") + self.host = netloc + self.url_scheme = scheme + + @property + def content_type(self) -> t.Optional[str]: + """The content type for the request. Reflected from and to + the :attr:`headers`. Do not set if you set :attr:`files` or + :attr:`form` for auto detection. + """ + ct = self.headers.get("Content-Type") + if ct is None and not self._input_stream: + if self._files: + return "multipart/form-data" + if self._form: + return "application/x-www-form-urlencoded" + return None + return ct + + @content_type.setter + def content_type(self, value: t.Optional[str]) -> None: + if value is None: + self.headers.pop("Content-Type", None) + else: + self.headers["Content-Type"] = value + + @property + def mimetype(self) -> t.Optional[str]: + """The mimetype (content type without charset etc.) + + .. versionadded:: 0.14 + """ + ct = self.content_type + return ct.split(";")[0].strip() if ct else None + + @mimetype.setter + def mimetype(self, value: str) -> None: + self.content_type = get_content_type(value, self.charset) + + @property + def mimetype_params(self) -> t.Mapping[str, str]: + """The mimetype parameters as dict. For example if the + content type is ``text/html; charset=utf-8`` the params would be + ``{'charset': 'utf-8'}``. + + .. 
versionadded:: 0.14 + """ + + def on_update(d: CallbackDict) -> None: + self.headers["Content-Type"] = dump_options_header(self.mimetype, d) + + d = parse_options_header(self.headers.get("content-type", ""))[1] + return CallbackDict(d, on_update) + + @property + def content_length(self) -> t.Optional[int]: + """The content length as integer. Reflected from and to the + :attr:`headers`. Do not set if you set :attr:`files` or + :attr:`form` for auto detection. + """ + return self.headers.get("Content-Length", type=int) + + @content_length.setter + def content_length(self, value: t.Optional[int]) -> None: + if value is None: + self.headers.pop("Content-Length", None) + else: + self.headers["Content-Length"] = str(value) + + def _get_form(self, name: str, storage: t.Type[_TAnyMultiDict]) -> _TAnyMultiDict: + """Common behavior for getting the :attr:`form` and + :attr:`files` properties. + + :param name: Name of the internal cached attribute. + :param storage: Storage class used for the data. + """ + if self.input_stream is not None: + raise AttributeError("an input stream is defined") + + rv = getattr(self, name) + + if rv is None: + rv = storage() + setattr(self, name, rv) + + return rv # type: ignore + + def _set_form(self, name: str, value: MultiDict) -> None: + """Common behavior for setting the :attr:`form` and + :attr:`files` properties. + + :param name: Name of the internal cached attribute. + :param value: Value to assign to the attribute. + """ + self._input_stream = None + setattr(self, name, value) + + @property + def form(self) -> MultiDict: + """A :class:`MultiDict` of form values.""" + return self._get_form("_form", MultiDict) + + @form.setter + def form(self, value: MultiDict) -> None: + self._set_form("_form", value) + + @property + def files(self) -> FileMultiDict: + """A :class:`FileMultiDict` of uploaded files. Use + :meth:`~FileMultiDict.add_file` to add new files. + """ + return self._get_form("_files", FileMultiDict) + + @files.setter + def files(self, value: FileMultiDict) -> None: + self._set_form("_files", value) + + @property + def input_stream(self) -> t.Optional[t.IO[bytes]]: + """An optional input stream. This is mutually exclusive with + setting :attr:`form` and :attr:`files`, setting it will clear + those. Do not provide this if the method is not ``POST`` or + another method that has a body. + """ + return self._input_stream + + @input_stream.setter + def input_stream(self, value: t.Optional[t.IO[bytes]]) -> None: + self._input_stream = value + self._form = None + self._files = None + + @property + def query_string(self) -> str: + """The query string. If you set this to a string + :attr:`args` will no longer be available. 
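+
+        A minimal sketch of the interplay with :attr:`args` (an illustrative
+        example added for this document, not upstream Werkzeug text)::
+
+            from werkzeug.test import EnvironBuilder
+
+            builder = EnvironBuilder("/search")
+            builder.args["q"] = "werkzeug"
+            builder.query_string              # "q=werkzeug", derived from args
+            builder.query_string = "q=flask"  # args is no longer available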
+ """ + if self._query_string is None: + if self._args is not None: + return url_encode(self._args, charset=self.charset) + return "" + return self._query_string + + @query_string.setter + def query_string(self, value: t.Optional[str]) -> None: + self._query_string = value + self._args = None + + @property + def args(self) -> MultiDict: + """The URL arguments as :class:`MultiDict`.""" + if self._query_string is not None: + raise AttributeError("a query string is defined") + if self._args is None: + self._args = MultiDict() + return self._args + + @args.setter + def args(self, value: t.Optional[MultiDict]) -> None: + self._query_string = None + self._args = value + + @property + def server_name(self) -> str: + """The server name (read-only, use :attr:`host` to set)""" + return self.host.split(":", 1)[0] + + @property + def server_port(self) -> int: + """The server port as integer (read-only, use :attr:`host` to set)""" + pieces = self.host.split(":", 1) + if len(pieces) == 2 and pieces[1].isdigit(): + return int(pieces[1]) + if self.url_scheme == "https": + return 443 + return 80 + + def __del__(self) -> None: + try: + self.close() + except Exception: + pass + + def close(self) -> None: + """Closes all files. If you put real :class:`file` objects into the + :attr:`files` dict you can call this method to automatically close + them all in one go. + """ + if self.closed: + return + try: + files = self.files.values() + except AttributeError: + files = () # type: ignore + for f in files: + try: + f.close() + except Exception: + pass + self.closed = True + + def get_environ(self) -> "WSGIEnvironment": + """Return the built environ. + + .. versionchanged:: 0.15 + The content type and length headers are set based on + input stream detection. Previously this only set the WSGI + keys. 
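+
+        A minimal sketch (an illustrative example added for this document,
+        not upstream Werkzeug text); the ``data`` dict is encoded as form
+        data because no explicit content type is given::
+
+            from werkzeug.test import EnvironBuilder
+
+            builder = EnvironBuilder("/hello", method="POST", data={"name": "World"})
+            environ = builder.get_environ()
+            environ["PATH_INFO"]      # "/hello"
+            environ["CONTENT_TYPE"]   # "application/x-www-form-urlencoded"
+            builder.close()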
+ """ + input_stream = self.input_stream + content_length = self.content_length + + mimetype = self.mimetype + content_type = self.content_type + + if input_stream is not None: + start_pos = input_stream.tell() + input_stream.seek(0, 2) + end_pos = input_stream.tell() + input_stream.seek(start_pos) + content_length = end_pos - start_pos + elif mimetype == "multipart/form-data": + input_stream, content_length, boundary = stream_encode_multipart( + CombinedMultiDict([self.form, self.files]), charset=self.charset + ) + content_type = f'{mimetype}; boundary="{boundary}"' + elif mimetype == "application/x-www-form-urlencoded": + form_encoded = url_encode(self.form, charset=self.charset).encode("ascii") + content_length = len(form_encoded) + input_stream = BytesIO(form_encoded) + else: + input_stream = BytesIO() + + result: "WSGIEnvironment" = {} + if self.environ_base: + result.update(self.environ_base) + + def _path_encode(x: str) -> str: + return _wsgi_encoding_dance(url_unquote(x, self.charset), self.charset) + + raw_uri = _wsgi_encoding_dance(self.request_uri, self.charset) + result.update( + { + "REQUEST_METHOD": self.method, + "SCRIPT_NAME": _path_encode(self.script_root), + "PATH_INFO": _path_encode(self.path), + "QUERY_STRING": _wsgi_encoding_dance(self.query_string, self.charset), + # Non-standard, added by mod_wsgi, uWSGI + "REQUEST_URI": raw_uri, + # Non-standard, added by gunicorn + "RAW_URI": raw_uri, + "SERVER_NAME": self.server_name, + "SERVER_PORT": str(self.server_port), + "HTTP_HOST": self.host, + "SERVER_PROTOCOL": self.server_protocol, + "wsgi.version": self.wsgi_version, + "wsgi.url_scheme": self.url_scheme, + "wsgi.input": input_stream, + "wsgi.errors": self.errors_stream, + "wsgi.multithread": self.multithread, + "wsgi.multiprocess": self.multiprocess, + "wsgi.run_once": self.run_once, + } + ) + + headers = self.headers.copy() + + if content_type is not None: + result["CONTENT_TYPE"] = content_type + headers.set("Content-Type", content_type) + + if content_length is not None: + result["CONTENT_LENGTH"] = str(content_length) + headers.set("Content-Length", content_length) + + combined_headers = defaultdict(list) + + for key, value in headers.to_wsgi_list(): + combined_headers[f"HTTP_{key.upper().replace('-', '_')}"].append(value) + + for key, values in combined_headers.items(): + result[key] = ", ".join(values) + + if self.environ_overrides: + result.update(self.environ_overrides) + + return result + + def get_request(self, cls: t.Optional[t.Type[Request]] = None) -> Request: + """Returns a request with the data. If the request class is not + specified :attr:`request_class` is used. + + :param cls: The request wrapper to use. + """ + if cls is None: + cls = self.request_class + + return cls(self.get_environ()) + + +class ClientRedirectError(Exception): + """If a redirect loop is detected when using follow_redirects=True with + the :cls:`Client`, then this exception is raised. + """ + + +class Client: + """This class allows you to send requests to a wrapped application. + + The use_cookies parameter indicates whether cookies should be stored and + sent for subsequent requests. This is True by default, but passing False + will disable this behaviour. + + If you want to request some subdomain of your application you may set + `allow_subdomain_redirects` to `True` as if not no external redirects + are allowed. + + .. versionchanged:: 2.0 + ``response_wrapper`` is always a subclass of + :class:``TestResponse``. + + .. versionchanged:: 0.5 + Added the ``use_cookies`` parameter. 
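+
+    A minimal sketch (an illustrative example added for this document, not
+    upstream Werkzeug text); ``hello_app`` is a stand-in for any WSGI
+    callable::
+
+        from werkzeug.test import Client
+        from werkzeug.wrappers import Response
+
+        def hello_app(environ, start_response):
+            return Response("Hello!")(environ, start_response)
+
+        client = Client(hello_app)
+        response = client.get("/")
+        response.status_code             # 200
+        response.get_data(as_text=True)  # "Hello!"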
+ """ + + def __init__( + self, + application: "WSGIApplication", + response_wrapper: t.Optional[t.Type["Response"]] = None, + use_cookies: bool = True, + allow_subdomain_redirects: bool = False, + ) -> None: + self.application = application + + if response_wrapper in {None, Response}: + response_wrapper = TestResponse + elif not isinstance(response_wrapper, TestResponse): + response_wrapper = type( + "WrapperTestResponse", + (TestResponse, response_wrapper), # type: ignore + {}, + ) + + self.response_wrapper = t.cast(t.Type["TestResponse"], response_wrapper) + + if use_cookies: + self.cookie_jar: t.Optional[_TestCookieJar] = _TestCookieJar() + else: + self.cookie_jar = None + + self.allow_subdomain_redirects = allow_subdomain_redirects + + def set_cookie( + self, + server_name: str, + key: str, + value: str = "", + max_age: t.Optional[t.Union[timedelta, int]] = None, + expires: t.Optional[t.Union[str, datetime, int, float]] = None, + path: str = "/", + domain: t.Optional[str] = None, + secure: bool = False, + httponly: bool = False, + samesite: t.Optional[str] = None, + charset: str = "utf-8", + ) -> None: + """Sets a cookie in the client's cookie jar. The server name + is required and has to match the one that is also passed to + the open call. + """ + assert self.cookie_jar is not None, "cookies disabled" + header = dump_cookie( + key, + value, + max_age, + expires, + path, + domain, + secure, + httponly, + charset, + samesite=samesite, + ) + environ = create_environ(path, base_url=f"http://{server_name}") + headers = [("Set-Cookie", header)] + self.cookie_jar.extract_wsgi(environ, headers) + + def delete_cookie( + self, + server_name: str, + key: str, + path: str = "/", + domain: t.Optional[str] = None, + secure: bool = False, + httponly: bool = False, + samesite: t.Optional[str] = None, + ) -> None: + """Deletes a cookie in the test client.""" + self.set_cookie( + server_name, + key, + expires=0, + max_age=0, + path=path, + domain=domain, + secure=secure, + httponly=httponly, + samesite=samesite, + ) + + def run_wsgi_app( + self, environ: "WSGIEnvironment", buffered: bool = False + ) -> t.Tuple[t.Iterable[bytes], str, Headers]: + """Runs the wrapped WSGI app with the given environment. + + :meta private: + """ + if self.cookie_jar is not None: + self.cookie_jar.inject_wsgi(environ) + + rv = run_wsgi_app(self.application, environ, buffered=buffered) + + if self.cookie_jar is not None: + self.cookie_jar.extract_wsgi(environ, rv[2]) + + return rv + + def resolve_redirect( + self, response: "TestResponse", buffered: bool = False + ) -> "TestResponse": + """Perform a new request to the location given by the redirect + response to the previous request. + + :meta private: + """ + scheme, netloc, path, qs, anchor = url_parse(response.location) + builder = EnvironBuilder.from_environ(response.request.environ, query_string=qs) + + to_name_parts = netloc.split(":", 1)[0].split(".") + from_name_parts = builder.server_name.split(".") + + if to_name_parts != [""]: + # The new location has a host, use it for the base URL. + builder.url_scheme = scheme + builder.host = netloc + else: + # A local redirect with autocorrect_location_header=False + # doesn't have a host, so use the request's host. + to_name_parts = from_name_parts + + # Explain why a redirect to a different server name won't be followed. 
+ if to_name_parts != from_name_parts: + if to_name_parts[-len(from_name_parts) :] == from_name_parts: + if not self.allow_subdomain_redirects: + raise RuntimeError("Following subdomain redirects is not enabled.") + else: + raise RuntimeError("Following external redirects is not supported.") + + path_parts = path.split("/") + root_parts = builder.script_root.split("/") + + if path_parts[: len(root_parts)] == root_parts: + # Strip the script root from the path. + builder.path = path[len(builder.script_root) :] + else: + # The new location is not under the script root, so use the + # whole path and clear the previous root. + builder.path = path + builder.script_root = "" + + # Only 307 and 308 preserve all of the original request. + if response.status_code not in {307, 308}: + # HEAD is preserved, everything else becomes GET. + if builder.method != "HEAD": + builder.method = "GET" + + # Clear the body and the headers that describe it. + + if builder.input_stream is not None: + builder.input_stream.close() + builder.input_stream = None + + builder.content_type = None + builder.content_length = None + builder.headers.pop("Transfer-Encoding", None) + + return self.open(builder, buffered=buffered) + + def open( + self, + *args: t.Any, + as_tuple: bool = False, + buffered: bool = False, + follow_redirects: bool = False, + **kwargs: t.Any, + ) -> "TestResponse": + """Generate an environ dict from the given arguments, make a + request to the application using it, and return the response. + + :param args: Passed to :class:`EnvironBuilder` to create the + environ for the request. If a single arg is passed, it can + be an existing :class:`EnvironBuilder` or an environ dict. + :param buffered: Convert the iterator returned by the app into + a list. If the iterator has a ``close()`` method, it is + called automatically. + :param follow_redirects: Make additional requests to follow HTTP + redirects until a non-redirect status is returned. + :attr:`TestResponse.history` lists the intermediate + responses. + + .. versionchanged:: 2.0 + ``as_tuple`` is deprecated and will be removed in Werkzeug + 2.1. Use :attr:`TestResponse.request` and + ``request.environ`` instead. + + .. versionchanged:: 2.0 + The request input stream is closed when calling + ``response.close()``. Input streams for redirects are + automatically closed. + + .. versionchanged:: 0.5 + If a dict is provided as file in the dict for the ``data`` + parameter the content type has to be called ``content_type`` + instead of ``mimetype``. This change was made for + consistency with :class:`werkzeug.FileWrapper`. + + .. versionchanged:: 0.5 + Added the ``follow_redirects`` parameter. + """ + request: t.Optional["Request"] = None + + if not kwargs and len(args) == 1: + arg = args[0] + + if isinstance(arg, EnvironBuilder): + request = arg.get_request() + elif isinstance(arg, dict): + request = EnvironBuilder.from_environ(arg).get_request() + elif isinstance(arg, Request): + request = arg + + if request is None: + builder = EnvironBuilder(*args, **kwargs) + + try: + request = builder.get_request() + finally: + builder.close() + + response = self.run_wsgi_app(request.environ, buffered=buffered) + response = self.response_wrapper(*response, request=request) + + redirects = set() + history: t.List["TestResponse"] = [] + + while follow_redirects and response.status_code in { + 301, + 302, + 303, + 305, + 307, + 308, + }: + # Exhaust intermediate response bodies to ensure middleware + # that returns an iterator runs any cleanup code. 
+ if not buffered: + response.make_sequence() + response.close() + + new_redirect_entry = (response.location, response.status_code) + + if new_redirect_entry in redirects: + raise ClientRedirectError( + f"Loop detected: A {response.status_code} redirect" + f" to {response.location} was already made." + ) + + redirects.add(new_redirect_entry) + response.history = tuple(history) + history.append(response) + response = self.resolve_redirect(response, buffered=buffered) + else: + # This is the final request after redirects, or not + # following redirects. + response.history = tuple(history) + # Close the input stream when closing the response, in case + # the input is an open temporary file. + response.call_on_close(request.input_stream.close) + + if as_tuple: + warnings.warn( + "'as_tuple' is deprecated and will be removed in" + " Werkzeug 2.1. Access 'response.request.environ'" + " instead.", + DeprecationWarning, + stacklevel=2, + ) + return request.environ, response # type: ignore + + return response + + def get(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``GET``.""" + kw["method"] = "GET" + return self.open(*args, **kw) + + def post(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``POST``.""" + kw["method"] = "POST" + return self.open(*args, **kw) + + def put(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``PUT``.""" + kw["method"] = "PUT" + return self.open(*args, **kw) + + def delete(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``DELETE``.""" + kw["method"] = "DELETE" + return self.open(*args, **kw) + + def patch(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``PATCH``.""" + kw["method"] = "PATCH" + return self.open(*args, **kw) + + def options(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``OPTIONS``.""" + kw["method"] = "OPTIONS" + return self.open(*args, **kw) + + def head(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``HEAD``.""" + kw["method"] = "HEAD" + return self.open(*args, **kw) + + def trace(self, *args: t.Any, **kw: t.Any) -> "TestResponse": + """Call :meth:`open` with ``method`` set to ``TRACE``.""" + kw["method"] = "TRACE" + return self.open(*args, **kw) + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.application!r}>" + + +def create_environ(*args: t.Any, **kwargs: t.Any) -> "WSGIEnvironment": + """Create a new WSGI environ dict based on the values passed. The first + parameter should be the path of the request which defaults to '/'. The + second one can either be an absolute path (in that case the host is + localhost:80) or a full path to the request with scheme, netloc port and + the path to the script. + + This accepts the same arguments as the :class:`EnvironBuilder` + constructor. + + .. versionchanged:: 0.5 + This function is now a thin wrapper over :class:`EnvironBuilder` which + was added in 0.5. The `headers`, `environ_base`, `environ_overrides` + and `charset` parameters were added. 
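# ---------------------------------------------------------------------------
# Editor's sketch (not part of the vendored Werkzeug source): how the
# redirect handling in Client.open above looks from the caller's side.
# "redirecting_app" is a hypothetical app that bounces "/" to "/target";
# the intermediate 302 response ends up on TestResponse.history.
from werkzeug.test import Client
from werkzeug.utils import redirect
from werkzeug.wrappers import Request, Response


@Request.application
def redirecting_app(request):
    if request.path == "/target":
        return Response("arrived")
    return redirect("/target")  # 302 by default


client = Client(redirecting_app)
response = client.get("/", follow_redirects=True)
assert response.status_code == 200
assert [r.status_code for r in response.history] == [302]
# ---------------------------------------------------------------------------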
+ """ + builder = EnvironBuilder(*args, **kwargs) + + try: + return builder.get_environ() + finally: + builder.close() + + +def run_wsgi_app( + app: "WSGIApplication", environ: "WSGIEnvironment", buffered: bool = False +) -> t.Tuple[t.Iterable[bytes], str, Headers]: + """Return a tuple in the form (app_iter, status, headers) of the + application output. This works best if you pass it an application that + returns an iterator all the time. + + Sometimes applications may use the `write()` callable returned + by the `start_response` function. This tries to resolve such edge + cases automatically. But if you don't get the expected output you + should set `buffered` to `True` which enforces buffering. + + If passed an invalid WSGI application the behavior of this function is + undefined. Never pass non-conforming WSGI applications to this function. + + :param app: the application to execute. + :param buffered: set to `True` to enforce buffering. + :return: tuple in the form ``(app_iter, status, headers)`` + """ + # Copy environ to ensure any mutations by the app (ProxyFix, for + # example) don't affect subsequent requests (such as redirects). + environ = _get_environ(environ).copy() + status: str + response: t.Optional[t.Tuple[str, t.List[t.Tuple[str, str]]]] = None + buffer: t.List[bytes] = [] + + def start_response(status, headers, exc_info=None): # type: ignore + nonlocal response + + if exc_info: + try: + raise exc_info[1].with_traceback(exc_info[2]) + finally: + exc_info = None + + response = (status, headers) + return buffer.append + + app_rv = app(environ, start_response) + close_func = getattr(app_rv, "close", None) + app_iter: t.Iterable[bytes] = iter(app_rv) + + # when buffering we emit the close call early and convert the + # application iterator into a regular list + if buffered: + try: + app_iter = list(app_iter) + finally: + if close_func is not None: + close_func() + + # otherwise we iterate the application iter until we have a response, chain + # the already received data with the already collected data and wrap it in + # a new `ClosingIterator` if we need to restore a `close` callable from the + # original return value. + else: + for item in app_iter: + buffer.append(item) + + if response is not None: + break + + if buffer: + app_iter = chain(buffer, app_iter) + + if close_func is not None and app_iter is not app_rv: + app_iter = ClosingIterator(app_iter, close_func) + + status, headers = response # type: ignore + return app_iter, status, Headers(headers) + + +class TestResponse(Response): + """:class:`~werkzeug.wrappers.Response` subclass that provides extra + information about requests made with the test :class:`Client`. + + Test client requests will always return an instance of this class. + If a custom response class is passed to the client, it is + subclassed along with this to support test information. + + If the test request included large files, or if the application is + serving a file, call :meth:`close` to close any open files and + prevent Python showing a ``ResourceWarning``. + """ + + request: Request + """A request object with the environ used to make the request that + resulted in this response. + """ + + history: t.Tuple["TestResponse", ...] + """A list of intermediate responses. Populated when the test request + is made with ``follow_redirects`` enabled. 
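# ---------------------------------------------------------------------------
# Editor's sketch (not part of the vendored Werkzeug source): create_environ
# and run_wsgi_app (both defined above) can also be used without the Client
# to get the raw (app_iter, status, headers) triple. "tiny_app" is a
# throwaway example application.
from werkzeug.test import create_environ, run_wsgi_app


def tiny_app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"ok"]


environ = create_environ("/ping", "http://localhost/")
app_iter, status, headers = run_wsgi_app(tiny_app, environ, buffered=True)
assert status == "200 OK"
assert headers["Content-Type"] == "text/plain"
assert b"".join(app_iter) == b"ok"
# ---------------------------------------------------------------------------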
+ """ + + def __init__( + self, + response: t.Iterable[bytes], + status: str, + headers: Headers, + request: Request, + history: t.Tuple["TestResponse"] = (), # type: ignore + **kwargs: t.Any, + ) -> None: + super().__init__(response, status, headers, **kwargs) + self.request = request + self.history = history + self._compat_tuple = response, status, headers + + def __iter__(self) -> t.Iterator: + warnings.warn( + ( + "The test client no longer returns a tuple, it returns" + " a 'TestResponse'. Tuple unpacking is deprecated and" + " will be removed in Werkzeug 2.1. Access the" + " attributes 'data', 'status', and 'headers' instead." + ), + DeprecationWarning, + stacklevel=2, + ) + return iter(self._compat_tuple) + + def __getitem__(self, item: int) -> t.Any: + warnings.warn( + ( + "The test client no longer returns a tuple, it returns" + " a 'TestResponse'. Item indexing is deprecated and" + " will be removed in Werkzeug 2.1. Access the" + " attributes 'data', 'status', and 'headers' instead." + ), + DeprecationWarning, + stacklevel=2, + ) + return self._compat_tuple[item] diff --git a/venv/lib/python3.8/site-packages/werkzeug/testapp.py b/venv/lib/python3.8/site-packages/werkzeug/testapp.py new file mode 100644 index 0000000..981f887 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/testapp.py @@ -0,0 +1,240 @@ +"""A small application that can be used to test a WSGI server and check +it for WSGI compliance. +""" +import base64 +import os +import sys +import typing as t +from html import escape +from textwrap import wrap + +from . import __version__ as _werkzeug_version +from .wrappers.request import Request +from .wrappers.response import Response + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIEnvironment + + +logo = Response( + base64.b64decode( + """ +R0lGODlhoACgAOMIAAEDACwpAEpCAGdgAJaKAM28AOnVAP3rAP///////// +//////////////////////yH5BAEKAAgALAAAAACgAKAAAAT+EMlJq704680R+F0ojmRpnuj0rWnrv +nB8rbRs33gu0bzu/0AObxgsGn3D5HHJbCUFyqZ0ukkSDlAidctNFg7gbI9LZlrBaHGtzAae0eloe25 +7w9EDOX2fst/xenyCIn5/gFqDiVVDV4aGeYiKkhSFjnCQY5OTlZaXgZp8nJ2ekaB0SQOjqphrpnOiq +ncEn65UsLGytLVmQ6m4sQazpbtLqL/HwpnER8bHyLrLOc3Oz8PRONPU1crXN9na263dMt/g4SzjMeX +m5yDpLqgG7OzJ4u8lT/P69ej3JPn69kHzN2OIAHkB9RUYSFCFQYQJFTIkCDBiwoXWGnowaLEjRm7+G +p9A7Hhx4rUkAUaSLJlxHMqVMD/aSycSZkyTplCqtGnRAM5NQ1Ly5OmzZc6gO4d6DGAUKA+hSocWYAo +SlM6oUWX2O/o0KdaVU5vuSQLAa0ADwQgMEMB2AIECZhVSnTno6spgbtXmHcBUrQACcc2FrTrWS8wAf +78cMFBgwIBgbN+qvTt3ayikRBk7BoyGAGABAdYyfdzRQGV3l4coxrqQ84GpUBmrdR3xNIDUPAKDBSA +ADIGDhhqTZIWaDcrVX8EsbNzbkvCOxG8bN5w8ly9H8jyTJHC6DFndQydbguh2e/ctZJFXRxMAqqPVA +tQH5E64SPr1f0zz7sQYjAHg0In+JQ11+N2B0XXBeeYZgBZFx4tqBToiTCPv0YBgQv8JqA6BEf6RhXx +w1ENhRBnWV8ctEX4Ul2zc3aVGcQNC2KElyTDYyYUWvShdjDyMOGMuFjqnII45aogPhz/CodUHFwaDx +lTgsaOjNyhGWJQd+lFoAGk8ObghI0kawg+EV5blH3dr+digkYuAGSaQZFHFz2P/cTaLmhF52QeSb45 +Jwxd+uSVGHlqOZpOeJpCFZ5J+rkAkFjQ0N1tah7JJSZUFNsrkeJUJMIBi8jyaEKIhKPomnC91Uo+NB +yyaJ5umnnpInIFh4t6ZSpGaAVmizqjpByDegYl8tPE0phCYrhcMWSv+uAqHfgH88ak5UXZmlKLVJhd +dj78s1Fxnzo6yUCrV6rrDOkluG+QzCAUTbCwf9SrmMLzK6p+OPHx7DF+bsfMRq7Ec61Av9i6GLw23r +idnZ+/OO0a99pbIrJkproCQMA17OPG6suq3cca5ruDfXCCDoS7BEdvmJn5otdqscn+uogRHHXs8cbh +EIfYaDY1AkrC0cqwcZpnM6ludx72x0p7Fo/hZAcpJDjax0UdHavMKAbiKltMWCF3xxh9k25N/Viud8 +ba78iCvUkt+V6BpwMlErmcgc502x+u1nSxJSJP9Mi52awD1V4yB/QHONsnU3L+A/zR4VL/indx/y64 +gqcj+qgTeweM86f0Qy1QVbvmWH1D9h+alqg254QD8HJXHvjQaGOqEqC22M54PcftZVKVSQG9jhkv7C +JyTyDoAJfPdu8v7DRZAxsP/ky9MJ3OL36DJfCFPASC3/aXlfLOOON9vGZZHydGf8LnxYJuuVIbl83y 
+Az5n/RPz07E+9+zw2A2ahz4HxHo9Kt79HTMx1Q7ma7zAzHgHqYH0SoZWyTuOLMiHwSfZDAQTn0ajk9 +YQqodnUYjByQZhZak9Wu4gYQsMyEpIOAOQKze8CmEF45KuAHTvIDOfHJNipwoHMuGHBnJElUoDmAyX +c2Qm/R8Ah/iILCCJOEokGowdhDYc/yoL+vpRGwyVSCWFYZNljkhEirGXsalWcAgOdeAdoXcktF2udb +qbUhjWyMQxYO01o6KYKOr6iK3fE4MaS+DsvBsGOBaMb0Y6IxADaJhFICaOLmiWTlDAnY1KzDG4ambL +cWBA8mUzjJsN2KjSaSXGqMCVXYpYkj33mcIApyhQf6YqgeNAmNvuC0t4CsDbSshZJkCS1eNisKqlyG +cF8G2JeiDX6tO6Mv0SmjCa3MFb0bJaGPMU0X7c8XcpvMaOQmCajwSeY9G0WqbBmKv34DsMIEztU6Y2 +KiDlFdt6jnCSqx7Dmt6XnqSKaFFHNO5+FmODxMCWBEaco77lNDGXBM0ECYB/+s7nKFdwSF5hgXumQe +EZ7amRg39RHy3zIjyRCykQh8Zo2iviRKyTDn/zx6EefptJj2Cw+Ep2FSc01U5ry4KLPYsTyWnVGnvb +UpyGlhjBUljyjHhWpf8OFaXwhp9O4T1gU9UeyPPa8A2l0p1kNqPXEVRm1AOs1oAGZU596t6SOR2mcB +Oco1srWtkaVrMUzIErrKri85keKqRQYX9VX0/eAUK1hrSu6HMEX3Qh2sCh0q0D2CtnUqS4hj62sE/z +aDs2Sg7MBS6xnQeooc2R2tC9YrKpEi9pLXfYXp20tDCpSP8rKlrD4axprb9u1Df5hSbz9QU0cRpfgn +kiIzwKucd0wsEHlLpe5yHXuc6FrNelOl7pY2+11kTWx7VpRu97dXA3DO1vbkhcb4zyvERYajQgAADs +=""" + ), + mimetype="image/png", +) + + +TEMPLATE = """\ + +WSGI Information + +
+<div class="box">
+  <h1>WSGI Information</h1>
+  <p>
+    This page displays all available information about the WSGI server and
+    the underlying Python interpreter.
+  </p>
+  <h2 id="python-interpreter">Python Interpreter</h2>
+  <table>
+    <tr><th>Python Version</th><td>%(python_version)s</td></tr>
+    <tr><th>Platform</th><td>%(platform)s [%(os)s]</td></tr>
+    <tr><th>API Version</th><td>%(api_version)s</td></tr>
+    <tr><th>Byteorder</th><td>%(byteorder)s</td></tr>
+    <tr><th>Werkzeug Version</th><td>%(werkzeug_version)s</td></tr>
+  </table>
+  <h2 id="wsgi-environment">WSGI Environment</h2>
+  <table>%(wsgi_env)s</table>
+  <h2 id="installed-eggs">Installed Eggs</h2>
+  <p>
+    The following python packages were installed on the system as
+    Python eggs:
+  </p>
+  <ul>%(python_eggs)s</ul>
+  <h2 id="sys-path">System Path</h2>
+  <p>
+    The following paths are the current contents of the load path.  The
+    following entries are looked up for Python packages.  Note that not
+    all items in this path are folders.  Gray and underlined items are
+    entries pointing to invalid resources or used by custom import hooks
+    such as the zip importer.
+  </p>
+  <p>
+    Items with a bright background were expanded for display from a
+    relative path.  If you encounter such paths in the output you might
+    want to check your setup as relative paths are usually problematic
+    in multithreaded environments.
+  </p>
+  <ul id="sys-path-list">%(sys_path)s</ul>
+</div>
    +""" + + +def iter_sys_path() -> t.Iterator[t.Tuple[str, bool, bool]]: + if os.name == "posix": + + def strip(x: str) -> str: + prefix = os.path.expanduser("~") + if x.startswith(prefix): + x = f"~{x[len(prefix) :]}" + return x + + else: + + def strip(x: str) -> str: + return x + + cwd = os.path.abspath(os.getcwd()) + for item in sys.path: + path = os.path.join(cwd, item or os.path.curdir) + yield strip(os.path.normpath(path)), not os.path.isdir(path), path != item + + +def render_testapp(req: Request) -> bytes: + try: + import pkg_resources + except ImportError: + eggs: t.Iterable[t.Any] = () + else: + eggs = sorted( + pkg_resources.working_set, + key=lambda x: x.project_name.lower(), # type: ignore + ) + python_eggs = [] + for egg in eggs: + try: + version = egg.version + except (ValueError, AttributeError): + version = "unknown" + python_eggs.append( + f"
  • {escape(egg.project_name)} [{escape(version)}]" + ) + + wsgi_env = [] + sorted_environ = sorted(req.environ.items(), key=lambda x: repr(x[0]).lower()) + for key, value in sorted_environ: + value = "".join(wrap(escape(repr(value)))) + wsgi_env.append(f"{escape(str(key))}{value}") + + sys_path = [] + for item, virtual, expanded in iter_sys_path(): + class_ = [] + if virtual: + class_.append("virtual") + if expanded: + class_.append("exp") + class_ = f' class="{" ".join(class_)}"' if class_ else "" + sys_path.append(f"{escape(item)}") + + return ( + TEMPLATE + % { + "python_version": "
    ".join(escape(sys.version).splitlines()), + "platform": escape(sys.platform), + "os": escape(os.name), + "api_version": sys.api_version, + "byteorder": sys.byteorder, + "werkzeug_version": _werkzeug_version, + "python_eggs": "\n".join(python_eggs), + "wsgi_env": "\n".join(wsgi_env), + "sys_path": "\n".join(sys_path), + } + ).encode("utf-8") + + +def test_app( + environ: "WSGIEnvironment", start_response: "StartResponse" +) -> t.Iterable[bytes]: + """Simple test application that dumps the environment. You can use + it to check if Werkzeug is working properly: + + .. sourcecode:: pycon + + >>> from werkzeug.serving import run_simple + >>> from werkzeug.testapp import test_app + >>> run_simple('localhost', 3000, test_app) + * Running on http://localhost:3000/ + + The application displays important information from the WSGI environment, + the Python interpreter and the installed libraries. + """ + req = Request(environ, populate_request=False) + if req.args.get("resource") == "logo": + response = logo + else: + response = Response(render_testapp(req), mimetype="text/html") + return response(environ, start_response) + + +if __name__ == "__main__": + from .serving import run_simple + + run_simple("localhost", 5000, test_app, use_reloader=True) diff --git a/venv/lib/python3.8/site-packages/werkzeug/urls.py b/venv/lib/python3.8/site-packages/werkzeug/urls.py new file mode 100644 index 0000000..9529da0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/urls.py @@ -0,0 +1,1211 @@ +"""Functions for working with URLs. + +Contains implementations of functions from :mod:`urllib.parse` that +handle bytes and strings. +""" +import codecs +import os +import re +import typing as t +import warnings + +from ._internal import _check_str_tuple +from ._internal import _decode_idna +from ._internal import _encode_idna +from ._internal import _make_encode_wrapper +from ._internal import _to_str + +if t.TYPE_CHECKING: + from . import datastructures as ds + +# A regular expression for what a valid schema looks like +_scheme_re = re.compile(r"^[a-zA-Z0-9+-.]+$") + +# Characters that are safe in any part of an URL. +_always_safe = frozenset( + bytearray( + b"abcdefghijklmnopqrstuvwxyz" + b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" + b"0123456789" + b"-._~" + ) +) + +_hexdigits = "0123456789ABCDEFabcdef" +_hextobyte = { + f"{a}{b}".encode("ascii"): int(f"{a}{b}", 16) + for a in _hexdigits + for b in _hexdigits +} +_bytetohex = [f"%{char:02X}".encode("ascii") for char in range(256)] + + +class _URLTuple(t.NamedTuple): + scheme: str + netloc: str + path: str + query: str + fragment: str + + +class BaseURL(_URLTuple): + """Superclass of :py:class:`URL` and :py:class:`BytesURL`.""" + + __slots__ = () + _at: str + _colon: str + _lbracket: str + _rbracket: str + + def __str__(self) -> str: + return self.to_url() + + def replace(self, **kwargs: t.Any) -> "BaseURL": + """Return an URL with the same values, except for those parameters + given new values by whichever keyword arguments are specified.""" + return self._replace(**kwargs) + + @property + def host(self) -> t.Optional[str]: + """The host part of the URL if available, otherwise `None`. The + host is either the hostname or the IP address mentioned in the + URL. It will not contain the port. + """ + return self._split_host()[0] + + @property + def ascii_host(self) -> t.Optional[str]: + """Works exactly like :attr:`host` but will return a result that + is restricted to ASCII. If it finds a netloc that is not ASCII + it will attempt to idna decode it. 
This is useful for socket + operations when the URL might include internationalized characters. + """ + rv = self.host + if rv is not None and isinstance(rv, str): + try: + rv = _encode_idna(rv) # type: ignore + except UnicodeError: + rv = rv.encode("ascii", "ignore") # type: ignore + return _to_str(rv, "ascii", "ignore") + + @property + def port(self) -> t.Optional[int]: + """The port in the URL as an integer if it was present, `None` + otherwise. This does not fill in default ports. + """ + try: + rv = int(_to_str(self._split_host()[1])) + if 0 <= rv <= 65535: + return rv + except (ValueError, TypeError): + pass + return None + + @property + def auth(self) -> t.Optional[str]: + """The authentication part in the URL if available, `None` + otherwise. + """ + return self._split_netloc()[0] + + @property + def username(self) -> t.Optional[str]: + """The username if it was part of the URL, `None` otherwise. + This undergoes URL decoding and will always be a string. + """ + rv = self._split_auth()[0] + if rv is not None: + return _url_unquote_legacy(rv) + return None + + @property + def raw_username(self) -> t.Optional[str]: + """The username if it was part of the URL, `None` otherwise. + Unlike :attr:`username` this one is not being decoded. + """ + return self._split_auth()[0] + + @property + def password(self) -> t.Optional[str]: + """The password if it was part of the URL, `None` otherwise. + This undergoes URL decoding and will always be a string. + """ + rv = self._split_auth()[1] + if rv is not None: + return _url_unquote_legacy(rv) + return None + + @property + def raw_password(self) -> t.Optional[str]: + """The password if it was part of the URL, `None` otherwise. + Unlike :attr:`password` this one is not being decoded. + """ + return self._split_auth()[1] + + def decode_query(self, *args: t.Any, **kwargs: t.Any) -> "ds.MultiDict[str, str]": + """Decodes the query part of the URL. Ths is a shortcut for + calling :func:`url_decode` on the query argument. The arguments and + keyword arguments are forwarded to :func:`url_decode` unchanged. + """ + return url_decode(self.query, *args, **kwargs) + + def join(self, *args: t.Any, **kwargs: t.Any) -> "BaseURL": + """Joins this URL with another one. This is just a convenience + function for calling into :meth:`url_join` and then parsing the + return value again. + """ + return url_parse(url_join(self, *args, **kwargs)) + + def to_url(self) -> str: + """Returns a URL string or bytes depending on the type of the + information stored. This is just a convenience function + for calling :meth:`url_unparse` for this URL. 
+ """ + return url_unparse(self) + + def encode_netloc(self) -> str: + """Encodes the netloc part to an ASCII safe URL as bytes.""" + rv = self.ascii_host or "" + if ":" in rv: + rv = f"[{rv}]" + port = self.port + if port is not None: + rv = f"{rv}:{port}" + auth = ":".join( + filter( + None, + [ + url_quote(self.raw_username or "", "utf-8", "strict", "/:%"), + url_quote(self.raw_password or "", "utf-8", "strict", "/:%"), + ], + ) + ) + if auth: + rv = f"{auth}@{rv}" + return rv + + def decode_netloc(self) -> str: + """Decodes the netloc part into a string.""" + rv = _decode_idna(self.host or "") + + if ":" in rv: + rv = f"[{rv}]" + port = self.port + if port is not None: + rv = f"{rv}:{port}" + auth = ":".join( + filter( + None, + [ + _url_unquote_legacy(self.raw_username or "", "/:%@"), + _url_unquote_legacy(self.raw_password or "", "/:%@"), + ], + ) + ) + if auth: + rv = f"{auth}@{rv}" + return rv + + def to_uri_tuple(self) -> "BaseURL": + """Returns a :class:`BytesURL` tuple that holds a URI. This will + encode all the information in the URL properly to ASCII using the + rules a web browser would follow. + + It's usually more interesting to directly call :meth:`iri_to_uri` which + will return a string. + """ + return url_parse(iri_to_uri(self)) + + def to_iri_tuple(self) -> "BaseURL": + """Returns a :class:`URL` tuple that holds a IRI. This will try + to decode as much information as possible in the URL without + losing information similar to how a web browser does it for the + URL bar. + + It's usually more interesting to directly call :meth:`uri_to_iri` which + will return a string. + """ + return url_parse(uri_to_iri(self)) + + def get_file_location( + self, pathformat: t.Optional[str] = None + ) -> t.Tuple[t.Optional[str], t.Optional[str]]: + """Returns a tuple with the location of the file in the form + ``(server, location)``. If the netloc is empty in the URL or + points to localhost, it's represented as ``None``. + + The `pathformat` by default is autodetection but needs to be set + when working with URLs of a specific system. The supported values + are ``'windows'`` when working with Windows or DOS paths and + ``'posix'`` when working with posix paths. + + If the URL does not point to a local file, the server and location + are both represented as ``None``. + + :param pathformat: The expected format of the path component. + Currently ``'windows'`` and ``'posix'`` are + supported. Defaults to ``None`` which is + autodetect. + """ + if self.scheme != "file": + return None, None + + path = url_unquote(self.path) + host = self.netloc or None + + if pathformat is None: + if os.name == "nt": + pathformat = "windows" + else: + pathformat = "posix" + + if pathformat == "windows": + if path[:1] == "/" and path[1:2].isalpha() and path[2:3] in "|:": + path = f"{path[1:2]}:{path[3:]}" + windows_share = path[:3] in ("\\" * 3, "/" * 3) + import ntpath + + path = ntpath.normpath(path) + # Windows shared drives are represented as ``\\host\\directory``. + # That results in a URL like ``file://///host/directory``, and a + # path like ``///host/directory``. We need to special-case this + # because the path contains the hostname. 
+ if windows_share and host is None: + parts = path.lstrip("\\").split("\\", 1) + if len(parts) == 2: + host, path = parts + else: + host = parts[0] + path = "" + elif pathformat == "posix": + import posixpath + + path = posixpath.normpath(path) + else: + raise TypeError(f"Invalid path format {pathformat!r}") + + if host in ("127.0.0.1", "::1", "localhost"): + host = None + + return host, path + + def _split_netloc(self) -> t.Tuple[t.Optional[str], str]: + if self._at in self.netloc: + auth, _, netloc = self.netloc.partition(self._at) + return auth, netloc + return None, self.netloc + + def _split_auth(self) -> t.Tuple[t.Optional[str], t.Optional[str]]: + auth = self._split_netloc()[0] + if not auth: + return None, None + if self._colon not in auth: + return auth, None + + username, _, password = auth.partition(self._colon) + return username, password + + def _split_host(self) -> t.Tuple[t.Optional[str], t.Optional[str]]: + rv = self._split_netloc()[1] + if not rv: + return None, None + + if not rv.startswith(self._lbracket): + if self._colon in rv: + host, _, port = rv.partition(self._colon) + return host, port + return rv, None + + idx = rv.find(self._rbracket) + if idx < 0: + return rv, None + + host = rv[1:idx] + rest = rv[idx + 1 :] + if rest.startswith(self._colon): + return host, rest[1:] + return host, None + + +class URL(BaseURL): + """Represents a parsed URL. This behaves like a regular tuple but + also has some extra attributes that give further insight into the + URL. + """ + + __slots__ = () + _at = "@" + _colon = ":" + _lbracket = "[" + _rbracket = "]" + + def encode(self, charset: str = "utf-8", errors: str = "replace") -> "BytesURL": + """Encodes the URL to a tuple made out of bytes. The charset is + only being used for the path, query and fragment. + """ + return BytesURL( + self.scheme.encode("ascii"), # type: ignore + self.encode_netloc(), + self.path.encode(charset, errors), # type: ignore + self.query.encode(charset, errors), # type: ignore + self.fragment.encode(charset, errors), # type: ignore + ) + + +class BytesURL(BaseURL): + """Represents a parsed URL in bytes.""" + + __slots__ = () + _at = b"@" # type: ignore + _colon = b":" # type: ignore + _lbracket = b"[" # type: ignore + _rbracket = b"]" # type: ignore + + def __str__(self) -> str: + return self.to_url().decode("utf-8", "replace") # type: ignore + + def encode_netloc(self) -> bytes: # type: ignore + """Returns the netloc unchanged as bytes.""" + return self.netloc # type: ignore + + def decode(self, charset: str = "utf-8", errors: str = "replace") -> "URL": + """Decodes the URL to a tuple made out of strings. The charset is + only being used for the path, query and fragment. 
+ """ + return URL( + self.scheme.decode("ascii"), # type: ignore + self.decode_netloc(), + self.path.decode(charset, errors), # type: ignore + self.query.decode(charset, errors), # type: ignore + self.fragment.decode(charset, errors), # type: ignore + ) + + +_unquote_maps: t.Dict[t.FrozenSet[int], t.Dict[bytes, int]] = {frozenset(): _hextobyte} + + +def _unquote_to_bytes( + string: t.Union[str, bytes], unsafe: t.Union[str, bytes] = "" +) -> bytes: + if isinstance(string, str): + string = string.encode("utf-8") + + if isinstance(unsafe, str): + unsafe = unsafe.encode("utf-8") + + unsafe = frozenset(bytearray(unsafe)) + groups = iter(string.split(b"%")) + result = bytearray(next(groups, b"")) + + try: + hex_to_byte = _unquote_maps[unsafe] + except KeyError: + hex_to_byte = _unquote_maps[unsafe] = { + h: b for h, b in _hextobyte.items() if b not in unsafe + } + + for group in groups: + code = group[:2] + + if code in hex_to_byte: + result.append(hex_to_byte[code]) + result.extend(group[2:]) + else: + result.append(37) # % + result.extend(group) + + return bytes(result) + + +def _url_encode_impl( + obj: t.Union[t.Mapping[str, str], t.Iterable[t.Tuple[str, str]]], + charset: str, + sort: bool, + key: t.Optional[t.Callable[[t.Tuple[str, str]], t.Any]], +) -> t.Iterator[str]: + from .datastructures import iter_multi_items + + iterable: t.Iterable[t.Tuple[str, str]] = iter_multi_items(obj) + + if sort: + iterable = sorted(iterable, key=key) + + for key_str, value_str in iterable: + if value_str is None: + continue + + if not isinstance(key_str, bytes): + key_bytes = str(key_str).encode(charset) + else: + key_bytes = key_str + + if not isinstance(value_str, bytes): + value_bytes = str(value_str).encode(charset) + else: + value_bytes = value_str + + yield f"{_fast_url_quote_plus(key_bytes)}={_fast_url_quote_plus(value_bytes)}" + + +def _url_unquote_legacy(value: str, unsafe: str = "") -> str: + try: + return url_unquote(value, charset="utf-8", errors="strict", unsafe=unsafe) + except UnicodeError: + return url_unquote(value, charset="latin1", unsafe=unsafe) + + +def url_parse( + url: str, scheme: t.Optional[str] = None, allow_fragments: bool = True +) -> BaseURL: + """Parses a URL from a string into a :class:`URL` tuple. If the URL + is lacking a scheme it can be provided as second argument. Otherwise, + it is ignored. Optionally fragments can be stripped from the URL + by setting `allow_fragments` to `False`. + + The inverse of this function is :func:`url_unparse`. + + :param url: the URL to parse. + :param scheme: the default schema to use if the URL is schemaless. + :param allow_fragments: if set to `False` a fragment will be removed + from the URL. 
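# ---------------------------------------------------------------------------
# Editor's sketch (not part of the vendored Werkzeug source): what url_parse
# returns and how the URL tuple defined above is typically used.
from werkzeug.urls import url_parse, url_unparse

url = url_parse("https://user:secret@example.com:8443/path?x=1#frag")
assert url.scheme == "https"
assert url.host == "example.com"
assert url.port == 8443
assert url.username == "user"
assert url.decode_query()["x"] == "1"

# replace() returns a new tuple; the parsed URL itself is immutable.
plain = url.replace(scheme="http", fragment="")
assert url_unparse(plain) == "http://user:secret@example.com:8443/path?x=1"
# ---------------------------------------------------------------------------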
+ """ + s = _make_encode_wrapper(url) + is_text_based = isinstance(url, str) + + if scheme is None: + scheme = s("") + netloc = query = fragment = s("") + i = url.find(s(":")) + if i > 0 and _scheme_re.match(_to_str(url[:i], errors="replace")): + # make sure "iri" is not actually a port number (in which case + # "scheme" is really part of the path) + rest = url[i + 1 :] + if not rest or any(c not in s("0123456789") for c in rest): + # not a port number + scheme, url = url[:i].lower(), rest + + if url[:2] == s("//"): + delim = len(url) + for c in s("/?#"): + wdelim = url.find(c, 2) + if wdelim >= 0: + delim = min(delim, wdelim) + netloc, url = url[2:delim], url[delim:] + if (s("[") in netloc and s("]") not in netloc) or ( + s("]") in netloc and s("[") not in netloc + ): + raise ValueError("Invalid IPv6 URL") + + if allow_fragments and s("#") in url: + url, fragment = url.split(s("#"), 1) + if s("?") in url: + url, query = url.split(s("?"), 1) + + result_type = URL if is_text_based else BytesURL + return result_type(scheme, netloc, url, query, fragment) + + +def _make_fast_url_quote( + charset: str = "utf-8", + errors: str = "strict", + safe: t.Union[str, bytes] = "/:", + unsafe: t.Union[str, bytes] = "", +) -> t.Callable[[bytes], str]: + """Precompile the translation table for a URL encoding function. + + Unlike :func:`url_quote`, the generated function only takes the + string to quote. + + :param charset: The charset to encode the result with. + :param errors: How to handle encoding errors. + :param safe: An optional sequence of safe characters to never encode. + :param unsafe: An optional sequence of unsafe characters to always encode. + """ + if isinstance(safe, str): + safe = safe.encode(charset, errors) + + if isinstance(unsafe, str): + unsafe = unsafe.encode(charset, errors) + + safe = (frozenset(bytearray(safe)) | _always_safe) - frozenset(bytearray(unsafe)) + table = [chr(c) if c in safe else f"%{c:02X}" for c in range(256)] + + def quote(string: bytes) -> str: + return "".join([table[c] for c in string]) + + return quote + + +_fast_url_quote = _make_fast_url_quote() +_fast_quote_plus = _make_fast_url_quote(safe=" ", unsafe="+") + + +def _fast_url_quote_plus(string: bytes) -> str: + return _fast_quote_plus(string).replace(" ", "+") + + +def url_quote( + string: t.Union[str, bytes], + charset: str = "utf-8", + errors: str = "strict", + safe: t.Union[str, bytes] = "/:", + unsafe: t.Union[str, bytes] = "", +) -> str: + """URL encode a single string with a given encoding. + + :param s: the string to quote. + :param charset: the charset to be used. + :param safe: an optional sequence of safe characters. + :param unsafe: an optional sequence of unsafe characters. + + .. versionadded:: 0.9.2 + The `unsafe` parameter was added. + """ + if not isinstance(string, (str, bytes, bytearray)): + string = str(string) + if isinstance(string, str): + string = string.encode(charset, errors) + if isinstance(safe, str): + safe = safe.encode(charset, errors) + if isinstance(unsafe, str): + unsafe = unsafe.encode(charset, errors) + safe = (frozenset(bytearray(safe)) | _always_safe) - frozenset(bytearray(unsafe)) + rv = bytearray() + for char in bytearray(string): + if char in safe: + rv.append(char) + else: + rv.extend(_bytetohex[char]) + return bytes(rv).decode(charset) + + +def url_quote_plus( + string: str, charset: str = "utf-8", errors: str = "strict", safe: str = "" +) -> str: + """URL encode a single string with the given encoding and convert + whitespace to "+". + + :param s: The string to quote. 
+ :param charset: The charset to be used. + :param safe: An optional sequence of safe characters. + """ + return url_quote(string, charset, errors, safe + " ", "+").replace(" ", "+") + + +def url_unparse(components: t.Tuple[str, str, str, str, str]) -> str: + """The reverse operation to :meth:`url_parse`. This accepts arbitrary + as well as :class:`URL` tuples and returns a URL as a string. + + :param components: the parsed URL as tuple which should be converted + into a URL string. + """ + _check_str_tuple(components) + scheme, netloc, path, query, fragment = components + s = _make_encode_wrapper(scheme) + url = s("") + + # We generally treat file:///x and file:/x the same which is also + # what browsers seem to do. This also allows us to ignore a schema + # register for netloc utilization or having to differentiate between + # empty and missing netloc. + if netloc or (scheme and path.startswith(s("/"))): + if path and path[:1] != s("/"): + path = s("/") + path + url = s("//") + (netloc or s("")) + path + elif path: + url += path + if scheme: + url = scheme + s(":") + url + if query: + url = url + s("?") + query + if fragment: + url = url + s("#") + fragment + return url + + +def url_unquote( + s: t.Union[str, bytes], + charset: str = "utf-8", + errors: str = "replace", + unsafe: str = "", +) -> str: + """URL decode a single string with a given encoding. If the charset + is set to `None` no decoding is performed and raw bytes are + returned. + + :param s: the string to unquote. + :param charset: the charset of the query string. If set to `None` + no decoding will take place. + :param errors: the error handling for the charset decoding. + """ + rv = _unquote_to_bytes(s, unsafe) + if charset is None: + return rv + return rv.decode(charset, errors) + + +def url_unquote_plus( + s: t.Union[str, bytes], charset: str = "utf-8", errors: str = "replace" +) -> str: + """URL decode a single string with the given `charset` and decode "+" to + whitespace. + + Per default encoding errors are ignored. If you want a different behavior + you can set `errors` to ``'replace'`` or ``'strict'``. + + :param s: The string to unquote. + :param charset: the charset of the query string. If set to `None` + no decoding will take place. + :param errors: The error handling for the `charset` decoding. + """ + if isinstance(s, str): + s = s.replace("+", " ") + else: + s = s.replace(b"+", b" ") + return url_unquote(s, charset, errors) + + +def url_fix(s: str, charset: str = "utf-8") -> str: + r"""Sometimes you get an URL by a user that just isn't a real URL because + it contains unsafe characters like ' ' and so on. This function can fix + some of the problems in a similar way browsers handle data entered by the + user: + + >>> url_fix('http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)') + 'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)' + + :param s: the string with the URL to fix. + :param charset: The target charset for the URL if the url was given + as a string. + """ + # First step is to switch to text processing and to convert + # backslashes (which are invalid in URLs anyways) to slashes. This is + # consistent with what Chrome does. 
+ s = _to_str(s, charset, "replace").replace("\\", "/") + + # For the specific case that we look like a malformed windows URL + # we want to fix this up manually: + if s.startswith("file://") and s[7:8].isalpha() and s[8:10] in (":/", "|/"): + s = f"file:///{s[7:]}" + + url = url_parse(s) + path = url_quote(url.path, charset, safe="/%+$!*'(),") + qs = url_quote_plus(url.query, charset, safe=":&%=+$!*'(),") + anchor = url_quote_plus(url.fragment, charset, safe=":&%=+$!*'(),") + return url_unparse((url.scheme, url.encode_netloc(), path, qs, anchor)) + + +# not-unreserved characters remain quoted when unquoting to IRI +_to_iri_unsafe = "".join([chr(c) for c in range(128) if c not in _always_safe]) + + +def _codec_error_url_quote(e: UnicodeError) -> t.Tuple[str, int]: + """Used in :func:`uri_to_iri` after unquoting to re-quote any + invalid bytes. + """ + # the docs state that UnicodeError does have these attributes, + # but mypy isn't picking them up + out = _fast_url_quote(e.object[e.start : e.end]) # type: ignore + return out, e.end # type: ignore + + +codecs.register_error("werkzeug.url_quote", _codec_error_url_quote) + + +def uri_to_iri( + uri: t.Union[str, t.Tuple[str, str, str, str, str]], + charset: str = "utf-8", + errors: str = "werkzeug.url_quote", +) -> str: + """Convert a URI to an IRI. All valid UTF-8 characters are unquoted, + leaving all reserved and invalid characters quoted. If the URL has + a domain, it is decoded from Punycode. + + >>> uri_to_iri("http://xn--n3h.net/p%C3%A5th?q=%C3%A8ry%DF") + 'http://\\u2603.net/p\\xe5th?q=\\xe8ry%DF' + + :param uri: The URI to convert. + :param charset: The encoding to encode unquoted bytes with. + :param errors: Error handler to use during ``bytes.encode``. By + default, invalid bytes are left quoted. + + .. versionchanged:: 0.15 + All reserved and invalid characters remain quoted. Previously, + only some reserved characters were preserved, and invalid bytes + were replaced instead of left quoted. + + .. versionadded:: 0.6 + """ + if isinstance(uri, tuple): + uri = url_unparse(uri) + + uri = url_parse(_to_str(uri, charset)) + path = url_unquote(uri.path, charset, errors, _to_iri_unsafe) + query = url_unquote(uri.query, charset, errors, _to_iri_unsafe) + fragment = url_unquote(uri.fragment, charset, errors, _to_iri_unsafe) + return url_unparse((uri.scheme, uri.decode_netloc(), path, query, fragment)) + + +# reserved characters remain unquoted when quoting to URI +_to_uri_safe = ":/?#[]@!$&'()*+,;=%" + + +def iri_to_uri( + iri: t.Union[str, t.Tuple[str, str, str, str, str]], + charset: str = "utf-8", + errors: str = "strict", + safe_conversion: bool = False, +) -> str: + """Convert an IRI to a URI. All non-ASCII and unsafe characters are + quoted. If the URL has a domain, it is encoded to Punycode. + + >>> iri_to_uri('http://\\u2603.net/p\\xe5th?q=\\xe8ry%DF') + 'http://xn--n3h.net/p%C3%A5th?q=%C3%A8ry%DF' + + :param iri: The IRI to convert. + :param charset: The encoding of the IRI. + :param errors: Error handler to use during ``bytes.encode``. + :param safe_conversion: Return the URL unchanged if it only contains + ASCII characters and no whitespace. See the explanation below. + + There is a general problem with IRI conversion with some protocols + that are in violation of the URI specification. 
Consider the + following two IRIs:: + + magnet:?xt=uri:whatever + itms-services://?action=download-manifest + + After parsing, we don't know if the scheme requires the ``//``, + which is dropped if empty, but conveys different meanings in the + final URL if it's present or not. In this case, you can use + ``safe_conversion``, which will return the URL unchanged if it only + contains ASCII characters and no whitespace. This can result in a + URI with unquoted characters if it was not already quoted correctly, + but preserves the URL's semantics. Werkzeug uses this for the + ``Location`` header for redirects. + + .. versionchanged:: 0.15 + All reserved characters remain unquoted. Previously, only some + reserved characters were left unquoted. + + .. versionchanged:: 0.9.6 + The ``safe_conversion`` parameter was added. + + .. versionadded:: 0.6 + """ + if isinstance(iri, tuple): + iri = url_unparse(iri) + + if safe_conversion: + # If we're not sure if it's safe to convert the URL, and it only + # contains ASCII characters, return it unconverted. + try: + native_iri = _to_str(iri) + ascii_iri = native_iri.encode("ascii") + + # Only return if it doesn't have whitespace. (Why?) + if len(ascii_iri.split()) == 1: + return native_iri + except UnicodeError: + pass + + iri = url_parse(_to_str(iri, charset, errors)) + path = url_quote(iri.path, charset, errors, _to_uri_safe) + query = url_quote(iri.query, charset, errors, _to_uri_safe) + fragment = url_quote(iri.fragment, charset, errors, _to_uri_safe) + return url_unparse((iri.scheme, iri.encode_netloc(), path, query, fragment)) + + +def url_decode( + s: t.AnyStr, + charset: str = "utf-8", + decode_keys: None = None, + include_empty: bool = True, + errors: str = "replace", + separator: str = "&", + cls: t.Optional[t.Type["ds.MultiDict"]] = None, +) -> "ds.MultiDict[str, str]": + """Parse a query string and return it as a :class:`MultiDict`. + + :param s: The query string to parse. + :param charset: Decode bytes to string with this charset. If not + given, bytes are returned as-is. + :param include_empty: Include keys with empty values in the dict. + :param errors: Error handling behavior when decoding bytes. + :param separator: Separator character between pairs. + :param cls: Container to hold result instead of :class:`MultiDict`. + + .. versionchanged:: 2.0 + The ``decode_keys`` parameter is deprecated and will be removed + in Werkzeug 2.1. + + .. versionchanged:: 0.5 + In previous versions ";" and "&" could be used for url decoding. + Now only "&" is supported. If you want to use ";", a different + ``separator`` can be provided. + + .. versionchanged:: 0.5 + The ``cls`` parameter was added. 
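# ---------------------------------------------------------------------------
# Editor's sketch (not part of the vendored Werkzeug source): url_decode
# parses a query string into a MultiDict, so repeated keys keep every value
# and "+" is decoded to a space.
from werkzeug.urls import url_decode

args = url_decode("a=1&a=2&b=hello+world&empty=")
assert args.getlist("a") == ["1", "2"]
assert args["b"] == "hello world"
assert args["empty"] == ""       # include_empty is True by default
# ---------------------------------------------------------------------------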
+ """ + if decode_keys is not None: + warnings.warn( + "'decode_keys' is deprecated and will be removed in Werkzeug 2.1.", + DeprecationWarning, + stacklevel=2, + ) + if cls is None: + from .datastructures import MultiDict # noqa: F811 + + cls = MultiDict + if isinstance(s, str) and not isinstance(separator, str): + separator = separator.decode(charset or "ascii") + elif isinstance(s, bytes) and not isinstance(separator, bytes): + separator = separator.encode(charset or "ascii") # type: ignore + return cls( + _url_decode_impl( + s.split(separator), charset, include_empty, errors # type: ignore + ) + ) + + +def url_decode_stream( + stream: t.IO[bytes], + charset: str = "utf-8", + decode_keys: None = None, + include_empty: bool = True, + errors: str = "replace", + separator: bytes = b"&", + cls: t.Optional[t.Type["ds.MultiDict"]] = None, + limit: t.Optional[int] = None, + return_iterator: bool = False, +) -> "ds.MultiDict[str, str]": + """Works like :func:`url_decode` but decodes a stream. The behavior + of stream and limit follows functions like + :func:`~werkzeug.wsgi.make_line_iter`. The generator of pairs is + directly fed to the `cls` so you can consume the data while it's + parsed. + + :param stream: a stream with the encoded querystring + :param charset: the charset of the query string. If set to `None` + no decoding will take place. + :param include_empty: Set to `False` if you don't want empty values to + appear in the dict. + :param errors: the decoding error behavior. + :param separator: the pair separator to be used, defaults to ``&`` + :param cls: an optional dict class to use. If this is not specified + or `None` the default :class:`MultiDict` is used. + :param limit: the content length of the URL data. Not necessary if + a limited stream is provided. + + .. versionchanged:: 2.0 + The ``decode_keys`` and ``return_iterator`` parameters are + deprecated and will be removed in Werkzeug 2.1. + + .. versionadded:: 0.8 + """ + from .wsgi import make_chunk_iter + + if decode_keys is not None: + warnings.warn( + "'decode_keys' is deprecated and will be removed in Werkzeug 2.1.", + DeprecationWarning, + stacklevel=2, + ) + + pair_iter = make_chunk_iter(stream, separator, limit) + decoder = _url_decode_impl(pair_iter, charset, include_empty, errors) + + if return_iterator: + warnings.warn( + "'return_iterator' is deprecated and will be removed in Werkzeug 2.1.", + DeprecationWarning, + stacklevel=2, + ) + return decoder # type: ignore + + if cls is None: + from .datastructures import MultiDict # noqa: F811 + + cls = MultiDict + + return cls(decoder) + + +def _url_decode_impl( + pair_iter: t.Iterable[t.AnyStr], charset: str, include_empty: bool, errors: str +) -> t.Iterator[t.Tuple[str, str]]: + for pair in pair_iter: + if not pair: + continue + s = _make_encode_wrapper(pair) + equal = s("=") + if equal in pair: + key, value = pair.split(equal, 1) + else: + if not include_empty: + continue + key = pair + value = s("") + yield ( + url_unquote_plus(key, charset, errors), + url_unquote_plus(value, charset, errors), + ) + + +def url_encode( + obj: t.Union[t.Mapping[str, str], t.Iterable[t.Tuple[str, str]]], + charset: str = "utf-8", + encode_keys: None = None, + sort: bool = False, + key: t.Optional[t.Callable[[t.Tuple[str, str]], t.Any]] = None, + separator: str = "&", +) -> str: + """URL encode a dict/`MultiDict`. If a value is `None` it will not appear + in the result string. Per default only values are encoded into the target + charset strings. 
+ + :param obj: the object to encode into a query string. + :param charset: the charset of the query string. + :param sort: set to `True` if you want parameters to be sorted by `key`. + :param separator: the separator to be used for the pairs. + :param key: an optional function to be used for sorting. For more details + check out the :func:`sorted` documentation. + + .. versionchanged:: 2.0 + The ``encode_keys`` parameter is deprecated and will be removed + in Werkzeug 2.1. + + .. versionchanged:: 0.5 + Added the ``sort``, ``key``, and ``separator`` parameters. + """ + if encode_keys is not None: + warnings.warn( + "'encode_keys' is deprecated and will be removed in Werkzeug 2.1.", + DeprecationWarning, + stacklevel=2, + ) + separator = _to_str(separator, "ascii") + return separator.join(_url_encode_impl(obj, charset, sort, key)) + + +def url_encode_stream( + obj: t.Union[t.Mapping[str, str], t.Iterable[t.Tuple[str, str]]], + stream: t.Optional[t.IO[str]] = None, + charset: str = "utf-8", + encode_keys: None = None, + sort: bool = False, + key: t.Optional[t.Callable[[t.Tuple[str, str]], t.Any]] = None, + separator: str = "&", +) -> None: + """Like :meth:`url_encode` but writes the results to a stream + object. If the stream is `None` a generator over all encoded + pairs is returned. + + :param obj: the object to encode into a query string. + :param stream: a stream to write the encoded object into or `None` if + an iterator over the encoded pairs should be returned. In + that case the separator argument is ignored. + :param charset: the charset of the query string. + :param sort: set to `True` if you want parameters to be sorted by `key`. + :param separator: the separator to be used for the pairs. + :param key: an optional function to be used for sorting. For more details + check out the :func:`sorted` documentation. + + .. versionchanged:: 2.0 + The ``encode_keys`` parameter is deprecated and will be removed + in Werkzeug 2.1. + + .. versionadded:: 0.8 + """ + if encode_keys is not None: + warnings.warn( + "'encode_keys' is deprecated and will be removed in Werkzeug 2.1.", + DeprecationWarning, + stacklevel=2, + ) + separator = _to_str(separator, "ascii") + gen = _url_encode_impl(obj, charset, sort, key) + if stream is None: + return gen # type: ignore + for idx, chunk in enumerate(gen): + if idx: + stream.write(separator) + stream.write(chunk) + return None + + +def url_join( + base: t.Union[str, t.Tuple[str, str, str, str, str]], + url: t.Union[str, t.Tuple[str, str, str, str, str]], + allow_fragments: bool = True, +) -> str: + """Join a base URL and a possibly relative URL to form an absolute + interpretation of the latter. + + :param base: the base URL for the join operation. + :param url: the URL to join. + :param allow_fragments: indicates whether fragments should be allowed. 
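# ---------------------------------------------------------------------------
# Editor's sketch (not part of the vendored Werkzeug source): url_encode is
# the counterpart of url_decode, and url_join resolves a relative URL
# against a base, similar to urllib.parse.urljoin.
from werkzeug.urls import url_encode, url_join

assert url_encode({"q": "hello world", "page": 2}, sort=True) == "page=2&q=hello+world"
assert url_join("http://example.com/a/b", "../c") == "http://example.com/c"
assert url_join("http://example.com/a/b", "//cdn.example.com/x") == (
    "http://cdn.example.com/x"
)
# ---------------------------------------------------------------------------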
+ """ + if isinstance(base, tuple): + base = url_unparse(base) + if isinstance(url, tuple): + url = url_unparse(url) + + _check_str_tuple((base, url)) + s = _make_encode_wrapper(base) + + if not base: + return url + if not url: + return base + + bscheme, bnetloc, bpath, bquery, bfragment = url_parse( + base, allow_fragments=allow_fragments + ) + scheme, netloc, path, query, fragment = url_parse(url, bscheme, allow_fragments) + if scheme != bscheme: + return url + if netloc: + return url_unparse((scheme, netloc, path, query, fragment)) + netloc = bnetloc + + if path[:1] == s("/"): + segments = path.split(s("/")) + elif not path: + segments = bpath.split(s("/")) + if not query: + query = bquery + else: + segments = bpath.split(s("/"))[:-1] + path.split(s("/")) + + # If the rightmost part is "./" we want to keep the slash but + # remove the dot. + if segments[-1] == s("."): + segments[-1] = s("") + + # Resolve ".." and "." + segments = [segment for segment in segments if segment != s(".")] + while True: + i = 1 + n = len(segments) - 1 + while i < n: + if segments[i] == s("..") and segments[i - 1] not in (s(""), s("..")): + del segments[i - 1 : i + 1] + break + i += 1 + else: + break + + # Remove trailing ".." if the URL is absolute + unwanted_marker = [s(""), s("..")] + while segments[:2] == unwanted_marker: + del segments[1] + + path = s("/").join(segments) + return url_unparse((scheme, netloc, path, query, fragment)) + + +class Href: + """Implements a callable that constructs URLs with the given base. The + function can be called with any number of positional and keyword + arguments which than are used to assemble the URL. Works with URLs + and posix paths. + + Positional arguments are appended as individual segments to + the path of the URL: + + >>> href = Href('/foo') + >>> href('bar', 23) + '/foo/bar/23' + >>> href('foo', bar=23) + '/foo/foo?bar=23' + + If any of the arguments (positional or keyword) evaluates to `None` it + will be skipped. If no keyword arguments are given the last argument + can be a :class:`dict` or :class:`MultiDict` (or any other dict subclass), + otherwise the keyword arguments are used for the query parameters, cutting + off the first trailing underscore of the parameter name: + + >>> href(is_=42) + '/foo?is=42' + >>> href({'foo': 'bar'}) + '/foo?foo=bar' + + Combining of both methods is not allowed: + + >>> href({'foo': 'bar'}, bar=42) + Traceback (most recent call last): + ... + TypeError: keyword arguments and query-dicts can't be combined + + Accessing attributes on the href object creates a new href object with + the attribute name as prefix: + + >>> bar_href = href.bar + >>> bar_href("blub") + '/foo/bar/blub' + + If `sort` is set to `True` the items are sorted by `key` or the default + sorting algorithm: + + >>> href = Href("/", sort=True) + >>> href(a=1, b=2, c=3) + '/?a=1&b=2&c=3' + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Use :mod:`werkzeug.routing` + instead. + + .. versionadded:: 0.5 + `sort` and `key` were added. + """ + + def __init__( # type: ignore + self, base="./", charset="utf-8", sort=False, key=None + ): + warnings.warn( + "'Href' is deprecated and will be removed in Werkzeug 2.1." 
+ " Use 'werkzeug.routing' instead.", + DeprecationWarning, + stacklevel=2, + ) + + if not base: + base = "./" + self.base = base + self.charset = charset + self.sort = sort + self.key = key + + def __getattr__(self, name): # type: ignore + if name[:2] == "__": + raise AttributeError(name) + base = self.base + if base[-1:] != "/": + base += "/" + return Href(url_join(base, name), self.charset, self.sort, self.key) + + def __call__(self, *path, **query): # type: ignore + if path and isinstance(path[-1], dict): + if query: + raise TypeError("keyword arguments and query-dicts can't be combined") + query, path = path[-1], path[:-1] + elif query: + query = {k[:-1] if k.endswith("_") else k: v for k, v in query.items()} + path = "/".join( + [ + _to_str(url_quote(x, self.charset), "ascii") + for x in path + if x is not None + ] + ).lstrip("/") + rv = self.base + if path: + if not rv.endswith("/"): + rv += "/" + rv = url_join(rv, f"./{path}") + if query: + rv += "?" + _to_str( + url_encode(query, self.charset, sort=self.sort, key=self.key), "ascii" + ) + return rv diff --git a/venv/lib/python3.8/site-packages/werkzeug/user_agent.py b/venv/lib/python3.8/site-packages/werkzeug/user_agent.py new file mode 100644 index 0000000..66ffcbe --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/user_agent.py @@ -0,0 +1,47 @@ +import typing as t + + +class UserAgent: + """Represents a parsed user agent header value. + + The default implementation does no parsing, only the :attr:`string` + attribute is set. A subclass may parse the string to set the + common attributes or expose other information. Set + :attr:`werkzeug.wrappers.Request.user_agent_class` to use a + subclass. + + :param string: The header value to parse. + + .. versionadded:: 2.0 + This replaces the previous ``useragents`` module, but does not + provide a built-in parser. 
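# ---------------------------------------------------------------------------
# Editor's sketch (not part of the vendored Werkzeug source): one way to plug
# a parser into the non-parsing base class above. The regex-based
# "ToyUserAgent" is a deliberately naive illustration, not a real parser;
# Request.user_agent_class is the documented hook for installing it.
import re

from werkzeug.user_agent import UserAgent
from werkzeug.wrappers import Request


class ToyUserAgent(UserAgent):
    """Toy parser that only recognises a ``Firefox/<version>`` token."""

    def __init__(self, string: str) -> None:
        super().__init__(string)
        match = re.search(r"Firefox/([\d.]+)", string)
        if match is not None:
            self.browser = "firefox"
            self.version = match.group(1)


class MyRequest(Request):
    # Requests built from this class parse User-Agent with ToyUserAgent
    # instead of the default, attribute-only UserAgent.
    user_agent_class = ToyUserAgent
# ---------------------------------------------------------------------------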
+ """ + + platform: t.Optional[str] = None + """The OS name, if it could be parsed from the string.""" + + browser: t.Optional[str] = None + """The browser name, if it could be parsed from the string.""" + + version: t.Optional[str] = None + """The browser version, if it could be parsed from the string.""" + + language: t.Optional[str] = None + """The browser language, if it could be parsed from the string.""" + + def __init__(self, string: str) -> None: + self.string: str = string + """The original header value.""" + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.browser}/{self.version}>" + + def __str__(self) -> str: + return self.string + + def __bool__(self) -> bool: + return bool(self.browser) + + def to_header(self) -> str: + """Convert to a header value.""" + return self.string diff --git a/venv/lib/python3.8/site-packages/werkzeug/useragents.py b/venv/lib/python3.8/site-packages/werkzeug/useragents.py new file mode 100644 index 0000000..4deed8f --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/useragents.py @@ -0,0 +1,215 @@ +import re +import typing as t +import warnings + +from .user_agent import UserAgent as _BaseUserAgent + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIEnvironment + + +class _UserAgentParser: + platform_rules: t.ClassVar[t.Iterable[t.Tuple[str, str]]] = ( + (" cros ", "chromeos"), + ("iphone|ios", "iphone"), + ("ipad", "ipad"), + (r"darwin\b|mac\b|os\s*x", "macos"), + ("win", "windows"), + (r"android", "android"), + ("netbsd", "netbsd"), + ("openbsd", "openbsd"), + ("freebsd", "freebsd"), + ("dragonfly", "dragonflybsd"), + ("(sun|i86)os", "solaris"), + (r"x11\b|lin(\b|ux)?", "linux"), + (r"nintendo\s+wii", "wii"), + ("irix", "irix"), + ("hp-?ux", "hpux"), + ("aix", "aix"), + ("sco|unix_sv", "sco"), + ("bsd", "bsd"), + ("amiga", "amiga"), + ("blackberry|playbook", "blackberry"), + ("symbian", "symbian"), + ) + browser_rules: t.ClassVar[t.Iterable[t.Tuple[str, str]]] = ( + ("googlebot", "google"), + ("msnbot", "msn"), + ("yahoo", "yahoo"), + ("ask jeeves", "ask"), + (r"aol|america\s+online\s+browser", "aol"), + (r"opera|opr", "opera"), + ("edge|edg", "edge"), + ("chrome|crios", "chrome"), + ("seamonkey", "seamonkey"), + ("firefox|firebird|phoenix|iceweasel", "firefox"), + ("galeon", "galeon"), + ("safari|version", "safari"), + ("webkit", "webkit"), + ("camino", "camino"), + ("konqueror", "konqueror"), + ("k-meleon", "kmeleon"), + ("netscape", "netscape"), + (r"msie|microsoft\s+internet\s+explorer|trident/.+? rv:", "msie"), + ("lynx", "lynx"), + ("links", "links"), + ("Baiduspider", "baidu"), + ("bingbot", "bing"), + ("mozilla", "mozilla"), + ) + + _browser_version_re = r"(?:{pattern})[/\sa-z(]*(\d+[.\da-z]+)?" 
+ _language_re = re.compile( + r"(?:;\s*|\s+)(\b\w{2}\b(?:-\b\w{2}\b)?)\s*;|" + r"(?:\(|\[|;)\s*(\b\w{2}\b(?:-\b\w{2}\b)?)\s*(?:\]|\)|;)" + ) + + def __init__(self) -> None: + self.platforms = [(b, re.compile(a, re.I)) for a, b in self.platform_rules] + self.browsers = [ + (b, re.compile(self._browser_version_re.format(pattern=a), re.I)) + for a, b in self.browser_rules + ] + + def __call__( + self, user_agent: str + ) -> t.Tuple[t.Optional[str], t.Optional[str], t.Optional[str], t.Optional[str]]: + platform: t.Optional[str] + browser: t.Optional[str] + version: t.Optional[str] + language: t.Optional[str] + + for platform, regex in self.platforms: # noqa: B007 + match = regex.search(user_agent) + if match is not None: + break + else: + platform = None + + # Except for Trident, all browser key words come after the last ')' + last_closing_paren = 0 + if ( + not re.compile(r"trident/.+? rv:", re.I).search(user_agent) + and ")" in user_agent + and user_agent[-1] != ")" + ): + last_closing_paren = user_agent.rindex(")") + + for browser, regex in self.browsers: # noqa: B007 + match = regex.search(user_agent[last_closing_paren:]) + if match is not None: + version = match.group(1) + break + else: + browser = version = None + match = self._language_re.search(user_agent) + if match is not None: + language = match.group(1) or match.group(2) + else: + language = None + return platform, browser, version, language + + +# It wasn't public, but users might have imported it anyway, show a +# warning if a user created an instance. +class UserAgentParser(_UserAgentParser): + """A simple user agent parser. Used by the `UserAgent`. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Use a dedicated parser library + instead. + """ + + def __init__(self) -> None: + warnings.warn( + "'UserAgentParser' is deprecated and will be removed in" + " Werkzeug 2.1. Use a dedicated parser library instead.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__() + + +class _deprecated_property(property): + def __init__(self, fget: t.Callable[["_UserAgent"], t.Any]) -> None: + super().__init__(fget) + self.message = ( + "The built-in user agent parser is deprecated and will be" + f" removed in Werkzeug 2.1. The {fget.__name__!r} property" + " will be 'None'. Subclass 'werkzeug.user_agent.UserAgent'" + " and set 'Request.user_agent_class' to use a different" + " parser." + ) + + def __get__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + warnings.warn(self.message, DeprecationWarning, stacklevel=3) + return super().__get__(*args, **kwargs) + + +# This is what Request.user_agent returns for now, only show warnings on +# attribute access, not creation. +class _UserAgent(_BaseUserAgent): + _parser = _UserAgentParser() + + def __init__(self, string: str) -> None: + super().__init__(string) + info = self._parser(string) + self._platform, self._browser, self._version, self._language = info + + @_deprecated_property + def platform(self) -> t.Optional[str]: # type: ignore + return self._platform + + @_deprecated_property + def browser(self) -> t.Optional[str]: # type: ignore + return self._browser + + @_deprecated_property + def version(self) -> t.Optional[str]: # type: ignore + return self._version + + @_deprecated_property + def language(self) -> t.Optional[str]: # type: ignore + return self._language + + +# This is what users might be importing, show warnings on create. +class UserAgent(_UserAgent): + """Represents a parsed user agent header value. 
+ + This uses a basic parser to try to extract some information from the + header. + + :param environ_or_string: The header value to parse, or a WSGI + environ containing the header. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Subclass + :class:`werkzeug.user_agent.UserAgent` (note the new module + name) to use a dedicated parser instead. + + .. versionchanged:: 2.0 + Passing a WSGI environ is deprecated and will be removed in 2.1. + """ + + def __init__(self, environ_or_string: "t.Union[str, WSGIEnvironment]") -> None: + if isinstance(environ_or_string, dict): + warnings.warn( + "Passing an environ to 'UserAgent' is deprecated and" + " will be removed in Werkzeug 2.1. Pass the header" + " value string instead.", + DeprecationWarning, + stacklevel=2, + ) + string = environ_or_string.get("HTTP_USER_AGENT", "") + else: + string = environ_or_string + + warnings.warn( + "The 'werkzeug.useragents' module is deprecated and will be" + " removed in Werkzeug 2.1. The new base API is" + " 'werkzeug.user_agent.UserAgent'.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(string) diff --git a/venv/lib/python3.8/site-packages/werkzeug/utils.py b/venv/lib/python3.8/site-packages/werkzeug/utils.py new file mode 100644 index 0000000..9007231 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/utils.py @@ -0,0 +1,1099 @@ +import codecs +import io +import mimetypes +import os +import pkgutil +import re +import sys +import typing as t +import unicodedata +import warnings +from datetime import datetime +from html.entities import name2codepoint +from time import time +from zlib import adler32 + +from ._internal import _DictAccessorProperty +from ._internal import _missing +from ._internal import _parse_signature +from ._internal import _TAccessorValue +from .datastructures import Headers +from .exceptions import NotFound +from .exceptions import RequestedRangeNotSatisfiable +from .security import safe_join +from .urls import url_quote +from .wsgi import wrap_file + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIEnvironment + from .wrappers.request import Request + from .wrappers.response import Response + +_T = t.TypeVar("_T") + +_entity_re = re.compile(r"&([^;]+);") +_filename_ascii_strip_re = re.compile(r"[^A-Za-z0-9_.-]") +_windows_device_files = ( + "CON", + "AUX", + "COM1", + "COM2", + "COM3", + "COM4", + "LPT1", + "LPT2", + "LPT3", + "PRN", + "NUL", +) + + +class cached_property(property, t.Generic[_T]): + """A :func:`property` that is only evaluated once. Subsequent access + returns the cached value. Setting the property sets the cached + value. Deleting the property clears the cached value, accessing it + again will evaluate it again. + + .. code-block:: python + + class Example: + @cached_property + def value(self): + # calculate something important here + return 42 + + e = Example() + e.value # evaluates + e.value # uses cache + e.value = 16 # sets cache + del e.value # clears cache + + The class must have a ``__dict__`` for this to work. + + .. versionchanged:: 2.0 + ``del obj.name`` clears the cached value. 
+ """ + + def __init__( + self, + fget: t.Callable[[t.Any], _T], + name: t.Optional[str] = None, + doc: t.Optional[str] = None, + ) -> None: + super().__init__(fget, doc=doc) + self.__name__ = name or fget.__name__ + self.__module__ = fget.__module__ + + def __set__(self, obj: object, value: _T) -> None: + obj.__dict__[self.__name__] = value + + def __get__(self, obj: object, type: type = None) -> _T: # type: ignore + if obj is None: + return self # type: ignore + + value: _T = obj.__dict__.get(self.__name__, _missing) + + if value is _missing: + value = self.fget(obj) # type: ignore + obj.__dict__[self.__name__] = value + + return value + + def __delete__(self, obj: object) -> None: + del obj.__dict__[self.__name__] + + +def invalidate_cached_property(obj: object, name: str) -> None: + """Invalidates the cache for a :class:`cached_property`: + + >>> class Test(object): + ... @cached_property + ... def magic_number(self): + ... print("recalculating...") + ... return 42 + ... + >>> var = Test() + >>> var.magic_number + recalculating... + 42 + >>> var.magic_number + 42 + >>> invalidate_cached_property(var, "magic_number") + >>> var.magic_number + recalculating... + 42 + + You must pass the name of the cached property as the second argument. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Use ``del obj.name`` instead. + """ + warnings.warn( + "'invalidate_cached_property' is deprecated and will be removed" + " in Werkzeug 2.1. Use 'del obj.name' instead.", + DeprecationWarning, + stacklevel=2, + ) + delattr(obj, name) + + +class environ_property(_DictAccessorProperty[_TAccessorValue]): + """Maps request attributes to environment variables. This works not only + for the Werkzeug request object, but also any other class with an + environ attribute: + + >>> class Test(object): + ... environ = {'key': 'value'} + ... test = environ_property('key') + >>> var = Test() + >>> var.test + 'value' + + If you pass it a second value it's used as default if the key does not + exist, the third one can be a converter that takes a value and converts + it. If it raises :exc:`ValueError` or :exc:`TypeError` the default value + is used. If no default value is provided `None` is used. + + Per default the property is read only. You have to explicitly enable it + by passing ``read_only=False`` to the constructor. + """ + + read_only = True + + def lookup(self, obj: "Request") -> "WSGIEnvironment": + return obj.environ + + +class header_property(_DictAccessorProperty[_TAccessorValue]): + """Like `environ_property` but for headers.""" + + def lookup(self, obj: t.Union["Request", "Response"]) -> Headers: + return obj.headers + + +class HTMLBuilder: + """Helper object for HTML generation. + + Per default there are two instances of that class. The `html` one, and + the `xhtml` one for those two dialects. The class uses keyword parameters + and positional parameters to generate small snippets of HTML. + + Keyword parameters are converted to XML/SGML attributes, positional + arguments are used as children. Because Python accepts positional + arguments before keyword arguments it's a good idea to use a list with the + star-syntax for some children: + + >>> html.p(class_='foo', *[html.a('foo', href='foo.html'), ' ', + ... html.a('bar', href='bar.html')]) + '
<p class="foo"><a href="foo.html">foo</a> <a href="bar.html">bar</a></p>' + + This class works around some browser limitations and can not be used for + arbitrary SGML/XML generation. For that purpose lxml and similar + libraries exist. + + Calling the builder escapes the string passed: + + >>> html.p(html("<foo>")) + '<p>&lt;foo&gt;</p>
    ' + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. + """ + + _entity_re = re.compile(r"&([^;]+);") + _entities = name2codepoint.copy() + _entities["apos"] = 39 + _empty_elements = { + "area", + "base", + "basefont", + "br", + "col", + "command", + "embed", + "frame", + "hr", + "img", + "input", + "keygen", + "isindex", + "link", + "meta", + "param", + "source", + "wbr", + } + _boolean_attributes = { + "selected", + "checked", + "compact", + "declare", + "defer", + "disabled", + "ismap", + "multiple", + "nohref", + "noresize", + "noshade", + "nowrap", + } + _plaintext_elements = {"textarea"} + _c_like_cdata = {"script", "style"} + + def __init__(self, dialect): # type: ignore + self._dialect = dialect + + def __call__(self, s): # type: ignore + import html + + warnings.warn( + "'utils.HTMLBuilder' is deprecated and will be removed in Werkzeug 2.1.", + DeprecationWarning, + stacklevel=2, + ) + return html.escape(s) + + def __getattr__(self, tag): # type: ignore + import html + + warnings.warn( + "'utils.HTMLBuilder' is deprecated and will be removed in Werkzeug 2.1.", + DeprecationWarning, + stacklevel=2, + ) + if tag[:2] == "__": + raise AttributeError(tag) + + def proxy(*children, **arguments): # type: ignore + buffer = f"<{tag}" + for key, value in arguments.items(): + if value is None: + continue + if key[-1] == "_": + key = key[:-1] + if key in self._boolean_attributes: + if not value: + continue + if self._dialect == "xhtml": + value = f'="{key}"' + else: + value = "" + else: + value = f'="{html.escape(value)}"' + buffer += f" {key}{value}" + if not children and tag in self._empty_elements: + if self._dialect == "xhtml": + buffer += " />" + else: + buffer += ">" + return buffer + buffer += ">" + + children_as_string = "".join([str(x) for x in children if x is not None]) + + if children_as_string: + if tag in self._plaintext_elements: + children_as_string = html.escape(children_as_string) + elif tag in self._c_like_cdata and self._dialect == "xhtml": + children_as_string = f"/**/" + buffer += children_as_string + f"" + return buffer + + return proxy + + def __repr__(self) -> str: + return f"<{type(self).__name__} for {self._dialect!r}>" + + +html = HTMLBuilder("html") +xhtml = HTMLBuilder("xhtml") + +# https://cgit.freedesktop.org/xdg/shared-mime-info/tree/freedesktop.org.xml.in +# https://www.iana.org/assignments/media-types/media-types.xhtml +# Types listed in the XDG mime info that have a charset in the IANA registration. +_charset_mimetypes = { + "application/ecmascript", + "application/javascript", + "application/sql", + "application/xml", + "application/xml-dtd", + "application/xml-external-parsed-entity", +} + + +def get_content_type(mimetype: str, charset: str) -> str: + """Returns the full content type string with charset for a mimetype. + + If the mimetype represents text, the charset parameter will be + appended, otherwise the mimetype is returned unchanged. + + :param mimetype: The mimetype to be used as content type. + :param charset: The charset to be appended for text mimetypes. + :return: The content type. + + .. versionchanged:: 0.15 + Any type that ends with ``+xml`` gets a charset, not just those + that start with ``application/``. Known text types such as + ``application/javascript`` are also given charsets. 
+ """ + if ( + mimetype.startswith("text/") + or mimetype in _charset_mimetypes + or mimetype.endswith("+xml") + ): + mimetype += f"; charset={charset}" + + return mimetype + + +def detect_utf_encoding(data: bytes) -> str: + """Detect which UTF encoding was used to encode the given bytes. + + The latest JSON standard (:rfc:`8259`) suggests that only UTF-8 is + accepted. Older documents allowed 8, 16, or 32. 16 and 32 can be big + or little endian. Some editors or libraries may prepend a BOM. + + :internal: + + :param data: Bytes in unknown UTF encoding. + :return: UTF encoding name + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. This is built in to + :func:`json.loads`. + + .. versionadded:: 0.15 + """ + warnings.warn( + "'detect_utf_encoding' is deprecated and will be removed in" + " Werkzeug 2.1. This is built in to 'json.loads'.", + DeprecationWarning, + stacklevel=2, + ) + head = data[:4] + + if head[:3] == codecs.BOM_UTF8: + return "utf-8-sig" + + if b"\x00" not in head: + return "utf-8" + + if head in (codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE): + return "utf-32" + + if head[:2] in (codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE): + return "utf-16" + + if len(head) == 4: + if head[:3] == b"\x00\x00\x00": + return "utf-32-be" + + if head[::2] == b"\x00\x00": + return "utf-16-be" + + if head[1:] == b"\x00\x00\x00": + return "utf-32-le" + + if head[1::2] == b"\x00\x00": + return "utf-16-le" + + if len(head) == 2: + return "utf-16-be" if head.startswith(b"\x00") else "utf-16-le" + + return "utf-8" + + +def format_string(string: str, context: t.Mapping[str, t.Any]) -> str: + """String-template format a string: + + >>> format_string('$foo and ${foo}s', dict(foo=42)) + '42 and 42s' + + This does not do any attribute lookup. + + :param string: the format string. + :param context: a dict with the variables to insert. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Use :class:`string.Template` + instead. + """ + from string import Template + + warnings.warn( + "'utils.format_string' is deprecated and will be removed in" + " Werkzeug 2.1. Use 'string.Template' instead.", + DeprecationWarning, + stacklevel=2, + ) + return Template(string).substitute(context) + + +def secure_filename(filename: str) -> str: + r"""Pass it a filename and it will return a secure version of it. This + filename can then safely be stored on a regular file system and passed + to :func:`os.path.join`. The filename returned is an ASCII only string + for maximum portability. + + On windows systems the function also makes sure that the file is not + named after one of the special device files. + + >>> secure_filename("My cool movie.mov") + 'My_cool_movie.mov' + >>> secure_filename("../../../etc/passwd") + 'etc_passwd' + >>> secure_filename('i contain cool \xfcml\xe4uts.txt') + 'i_contain_cool_umlauts.txt' + + The function might return an empty filename. It's your responsibility + to ensure that the filename is unique and that you abort or + generate a random filename if the function returned an empty one. + + .. versionadded:: 0.5 + + :param filename: the filename to secure + """ + filename = unicodedata.normalize("NFKD", filename) + filename = filename.encode("ascii", "ignore").decode("ascii") + + for sep in os.path.sep, os.path.altsep: + if sep: + filename = filename.replace(sep, " ") + filename = str(_filename_ascii_strip_re.sub("", "_".join(filename.split()))).strip( + "._" + ) + + # on nt a couple of special files are present in each folder. 
We + # have to ensure that the target file is not such a filename. In + # this case we prepend an underline + if ( + os.name == "nt" + and filename + and filename.split(".")[0].upper() in _windows_device_files + ): + filename = f"_{filename}" + + return filename + + +def escape(s: t.Any) -> str: + """Replace ``&``, ``<``, ``>``, ``"``, and ``'`` with HTML-safe + sequences. + + ``None`` is escaped to an empty string. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Use MarkupSafe instead. + """ + import html + + warnings.warn( + "'utils.escape' is deprecated and will be removed in Werkzeug" + " 2.1. Use MarkupSafe instead.", + DeprecationWarning, + stacklevel=2, + ) + + if s is None: + return "" + + if hasattr(s, "__html__"): + return s.__html__() # type: ignore + + if not isinstance(s, str): + s = str(s) + + return html.escape(s, quote=True) # type: ignore + + +def unescape(s: str) -> str: + """The reverse of :func:`escape`. This unescapes all the HTML + entities, not only those inserted by ``escape``. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Use MarkupSafe instead. + """ + import html + + warnings.warn( + "'utils.unescape' is deprecated and will be removed in Werkzueg" + " 2.1. Use MarkupSafe instead.", + DeprecationWarning, + stacklevel=2, + ) + return html.unescape(s) + + +def redirect( + location: str, code: int = 302, Response: t.Optional[t.Type["Response"]] = None +) -> "Response": + """Returns a response object (a WSGI application) that, if called, + redirects the client to the target location. Supported codes are + 301, 302, 303, 305, 307, and 308. 300 is not supported because + it's not a real redirect and 304 because it's the answer for a + request with a request with defined If-Modified-Since headers. + + .. versionadded:: 0.6 + The location can now be a unicode string that is encoded using + the :func:`iri_to_uri` function. + + .. versionadded:: 0.10 + The class used for the Response object can now be passed in. + + :param location: the location the response should redirect to. + :param code: the redirect status code. defaults to 302. + :param class Response: a Response class to use when instantiating a + response. The default is :class:`werkzeug.wrappers.Response` if + unspecified. + """ + import html + + if Response is None: + from .wrappers import Response # type: ignore + + display_location = html.escape(location) + if isinstance(location, str): + # Safe conversion is necessary here as we might redirect + # to a broken URI scheme (for instance itms-services). + from .urls import iri_to_uri + + location = iri_to_uri(location, safe_conversion=True) + response = Response( # type: ignore + '\n' + "Redirecting...\n" + "
<h1>Redirecting...</h1>\n" + "<p>
    You should be redirected automatically to target URL: " + f'{display_location}. If' + " not click the link.", + code, + mimetype="text/html", + ) + response.headers["Location"] = location + return response + + +def append_slash_redirect(environ: "WSGIEnvironment", code: int = 301) -> "Response": + """Redirects to the same URL but with a slash appended. The behavior + of this function is undefined if the path ends with a slash already. + + :param environ: the WSGI environment for the request that triggers + the redirect. + :param code: the status code for the redirect. + """ + new_path = environ["PATH_INFO"].strip("/") + "/" + query_string = environ.get("QUERY_STRING") + if query_string: + new_path += f"?{query_string}" + return redirect(new_path, code) + + +def send_file( + path_or_file: t.Union[os.PathLike, str, t.IO[bytes]], + environ: "WSGIEnvironment", + mimetype: t.Optional[str] = None, + as_attachment: bool = False, + download_name: t.Optional[str] = None, + conditional: bool = True, + etag: t.Union[bool, str] = True, + last_modified: t.Optional[t.Union[datetime, int, float]] = None, + max_age: t.Optional[ + t.Union[int, t.Callable[[t.Optional[str]], t.Optional[int]]] + ] = None, + use_x_sendfile: bool = False, + response_class: t.Optional[t.Type["Response"]] = None, + _root_path: t.Optional[t.Union[os.PathLike, str]] = None, +) -> "Response": + """Send the contents of a file to the client. + + The first argument can be a file path or a file-like object. Paths + are preferred in most cases because Werkzeug can manage the file and + get extra information from the path. Passing a file-like object + requires that the file is opened in binary mode, and is mostly + useful when building a file in memory with :class:`io.BytesIO`. + + Never pass file paths provided by a user. The path is assumed to be + trusted, so a user could craft a path to access a file you didn't + intend. + + If the WSGI server sets a ``file_wrapper`` in ``environ``, it is + used, otherwise Werkzeug's built-in wrapper is used. Alternatively, + if the HTTP server supports ``X-Sendfile``, ``use_x_sendfile=True`` + will tell the server to send the given path, which is much more + efficient than reading it in Python. + + :param path_or_file: The path to the file to send, relative to the + current working directory if a relative path is given. + Alternatively, a file-like object opened in binary mode. Make + sure the file pointer is seeked to the start of the data. + :param environ: The WSGI environ for the current request. + :param mimetype: The MIME type to send for the file. If not + provided, it will try to detect it from the file name. + :param as_attachment: Indicate to a browser that it should offer to + save the file instead of displaying it. + :param download_name: The default name browsers will use when saving + the file. Defaults to the passed file name. + :param conditional: Enable conditional and range responses based on + request headers. Requires passing a file path and ``environ``. + :param etag: Calculate an ETag for the file, which requires passing + a file path. Can also be a string to use instead. + :param last_modified: The last modified time to send for the file, + in seconds. If not provided, it will try to detect it from the + file path. + :param max_age: How long the client should cache the file, in + seconds. If set, ``Cache-Control`` will be ``public``, otherwise + it will be ``no-cache`` to prefer conditional caching. 
+ :param use_x_sendfile: Set the ``X-Sendfile`` header to let the + server to efficiently send the file. Requires support from the + HTTP server. Requires passing a file path. + :param response_class: Build the response using this class. Defaults + to :class:`~werkzeug.wrappers.Response`. + :param _root_path: Do not use. For internal use only. Use + :func:`send_from_directory` to safely send files under a path. + + .. versionchanged:: 2.0.2 + ``send_file`` only sets a detected ``Content-Encoding`` if + ``as_attachment`` is disabled. + + .. versionadded:: 2.0 + Adapted from Flask's implementation. + + .. versionchanged:: 2.0 + ``download_name`` replaces Flask's ``attachment_filename`` + parameter. If ``as_attachment=False``, it is passed with + ``Content-Disposition: inline`` instead. + + .. versionchanged:: 2.0 + ``max_age`` replaces Flask's ``cache_timeout`` parameter. + ``conditional`` is enabled and ``max_age`` is not set by + default. + + .. versionchanged:: 2.0 + ``etag`` replaces Flask's ``add_etags`` parameter. It can be a + string to use instead of generating one. + + .. versionchanged:: 2.0 + If an encoding is returned when guessing ``mimetype`` from + ``download_name``, set the ``Content-Encoding`` header. + """ + if response_class is None: + from .wrappers import Response + + response_class = Response + + path: t.Optional[str] = None + file: t.Optional[t.IO[bytes]] = None + size: t.Optional[int] = None + mtime: t.Optional[float] = None + headers = Headers() + + if isinstance(path_or_file, (os.PathLike, str)) or hasattr( + path_or_file, "__fspath__" + ): + path_or_file = t.cast(t.Union[os.PathLike, str], path_or_file) + + # Flask will pass app.root_path, allowing its send_file wrapper + # to not have to deal with paths. + if _root_path is not None: + path = os.path.join(_root_path, path_or_file) + else: + path = os.path.abspath(path_or_file) + + stat = os.stat(path) + size = stat.st_size + mtime = stat.st_mtime + else: + file = path_or_file + + if download_name is None and path is not None: + download_name = os.path.basename(path) + + if mimetype is None: + if download_name is None: + raise TypeError( + "Unable to detect the MIME type because a file name is" + " not available. Either set 'download_name', pass a" + " path instead of a file, or set 'mimetype'." + ) + + mimetype, encoding = mimetypes.guess_type(download_name) + + if mimetype is None: + mimetype = "application/octet-stream" + + # Don't send encoding for attachments, it causes browsers to + # save decompress tar.gz files. + if encoding is not None and not as_attachment: + headers.set("Content-Encoding", encoding) + + if download_name is not None: + try: + download_name.encode("ascii") + except UnicodeEncodeError: + simple = unicodedata.normalize("NFKD", download_name) + simple = simple.encode("ascii", "ignore").decode("ascii") + quoted = url_quote(download_name, safe="") + names = {"filename": simple, "filename*": f"UTF-8''{quoted}"} + else: + names = {"filename": download_name} + + value = "attachment" if as_attachment else "inline" + headers.set("Content-Disposition", value, **names) + elif as_attachment: + raise TypeError( + "No name provided for attachment. Either set" + " 'download_name' or pass a path instead of a file." 
+ ) + + if use_x_sendfile and path is not None: + headers["X-Sendfile"] = path + data = None + else: + if file is None: + file = open(path, "rb") # type: ignore + elif isinstance(file, io.BytesIO): + size = file.getbuffer().nbytes + elif isinstance(file, io.TextIOBase): + raise ValueError("Files must be opened in binary mode or use BytesIO.") + + data = wrap_file(environ, file) + + rv = response_class( + data, mimetype=mimetype, headers=headers, direct_passthrough=True + ) + + if size is not None: + rv.content_length = size + + if last_modified is not None: + rv.last_modified = last_modified # type: ignore + elif mtime is not None: + rv.last_modified = mtime # type: ignore + + rv.cache_control.no_cache = True + + # Flask will pass app.get_send_file_max_age, allowing its send_file + # wrapper to not have to deal with paths. + if callable(max_age): + max_age = max_age(path) + + if max_age is not None: + if max_age > 0: + rv.cache_control.no_cache = None + rv.cache_control.public = True + + rv.cache_control.max_age = max_age + rv.expires = int(time() + max_age) # type: ignore + + if isinstance(etag, str): + rv.set_etag(etag) + elif etag and path is not None: + check = adler32(path.encode("utf-8")) & 0xFFFFFFFF + rv.set_etag(f"{mtime}-{size}-{check}") + + if conditional: + try: + rv = rv.make_conditional(environ, accept_ranges=True, complete_length=size) + except RequestedRangeNotSatisfiable: + if file is not None: + file.close() + + raise + + # Some x-sendfile implementations incorrectly ignore the 304 + # status code and send the file anyway. + if rv.status_code == 304: + rv.headers.pop("x-sendfile", None) + + return rv + + +def send_from_directory( + directory: t.Union[os.PathLike, str], + path: t.Union[os.PathLike, str], + environ: "WSGIEnvironment", + **kwargs: t.Any, +) -> "Response": + """Send a file from within a directory using :func:`send_file`. + + This is a secure way to serve files from a folder, such as static + files or uploads. Uses :func:`~werkzeug.security.safe_join` to + ensure the path coming from the client is not maliciously crafted to + point outside the specified directory. + + If the final path does not point to an existing regular file, + returns a 404 :exc:`~werkzeug.exceptions.NotFound` error. + + :param directory: The directory that ``path`` must be located under. + :param path: The path to the file to send, relative to + ``directory``. + :param environ: The WSGI environ for the current request. + :param kwargs: Arguments to pass to :func:`send_file`. + + .. versionadded:: 2.0 + Adapted from Flask's implementation. + """ + path = safe_join(os.fspath(directory), os.fspath(path)) + + if path is None: + raise NotFound() + + # Flask will pass app.root_path, allowing its send_from_directory + # wrapper to not have to deal with paths. + if "_root_path" in kwargs: + path = os.path.join(kwargs["_root_path"], path) + + try: + if not os.path.isfile(path): + raise NotFound() + except ValueError: + # path contains null byte on Python < 3.8 + raise NotFound() from None + + return send_file(path, environ, **kwargs) + + +def import_string(import_name: str, silent: bool = False) -> t.Any: + """Imports an object based on a string. This is useful if you want to + use import paths as endpoints or something similar. An import path can + be specified either in dotted notation (``xml.sax.saxutils.escape``) + or with a colon as object delimiter (``xml.sax.saxutils:escape``). + + If `silent` is True the return value will be `None` if the import fails. 
+ + :param import_name: the dotted name for the object to import. + :param silent: if set to `True` import errors are ignored and + `None` is returned instead. + :return: imported object + """ + import_name = import_name.replace(":", ".") + try: + try: + __import__(import_name) + except ImportError: + if "." not in import_name: + raise + else: + return sys.modules[import_name] + + module_name, obj_name = import_name.rsplit(".", 1) + module = __import__(module_name, globals(), locals(), [obj_name]) + try: + return getattr(module, obj_name) + except AttributeError as e: + raise ImportError(e) from None + + except ImportError as e: + if not silent: + raise ImportStringError(import_name, e).with_traceback( + sys.exc_info()[2] + ) from None + + return None + + +def find_modules( + import_path: str, include_packages: bool = False, recursive: bool = False +) -> t.Iterator[str]: + """Finds all the modules below a package. This can be useful to + automatically import all views / controllers so that their metaclasses / + function decorators have a chance to register themselves on the + application. + + Packages are not returned unless `include_packages` is `True`. This can + also recursively list modules but in that case it will import all the + packages to get the correct load path of that module. + + :param import_path: the dotted name for the package to find child modules. + :param include_packages: set to `True` if packages should be returned, too. + :param recursive: set to `True` if recursion should happen. + :return: generator + """ + module = import_string(import_path) + path = getattr(module, "__path__", None) + if path is None: + raise ValueError(f"{import_path!r} is not a package") + basename = f"{module.__name__}." + for _importer, modname, ispkg in pkgutil.iter_modules(path): + modname = basename + modname + if ispkg: + if include_packages: + yield modname + if recursive: + yield from find_modules(modname, include_packages, True) + else: + yield modname + + +def validate_arguments(func, args, kwargs, drop_extra=True): # type: ignore + """Checks if the function accepts the arguments and keyword arguments. + Returns a new ``(args, kwargs)`` tuple that can safely be passed to + the function without causing a `TypeError` because the function signature + is incompatible. If `drop_extra` is set to `True` (which is the default) + any extra positional or keyword arguments are dropped automatically. + + The exception raised provides three attributes: + + `missing` + A set of argument names that the function expected but where + missing. + + `extra` + A dict of keyword arguments that the function can not handle but + where provided. + + `extra_positional` + A list of values that where given by positional argument but the + function cannot accept. + + This can be useful for decorators that forward user submitted data to + a view function:: + + from werkzeug.utils import ArgumentValidationError, validate_arguments + + def sanitize(f): + def proxy(request): + data = request.values.to_dict() + try: + args, kwargs = validate_arguments(f, (request,), data) + except ArgumentValidationError: + raise BadRequest('The browser failed to transmit all ' + 'the data expected.') + return f(*args, **kwargs) + return proxy + + :param func: the function the validation is performed against. + :param args: a tuple of positional arguments. + :param kwargs: a dict of keyword arguments. + :param drop_extra: set to `False` if you don't want extra arguments + to be silently dropped. 
+ :return: tuple in the form ``(args, kwargs)``. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Use :func:`inspect.signature` + instead. + """ + warnings.warn( + "'utils.validate_arguments' is deprecated and will be removed" + " in Werkzeug 2.1. Use 'inspect.signature' instead.", + DeprecationWarning, + stacklevel=2, + ) + parser = _parse_signature(func) + args, kwargs, missing, extra, extra_positional = parser(args, kwargs)[:5] + if missing: + raise ArgumentValidationError(tuple(missing)) + elif (extra or extra_positional) and not drop_extra: + raise ArgumentValidationError(None, extra, extra_positional) + return tuple(args), kwargs + + +def bind_arguments(func, args, kwargs): # type: ignore + """Bind the arguments provided into a dict. When passed a function, + a tuple of arguments and a dict of keyword arguments `bind_arguments` + returns a dict of names as the function would see it. This can be useful + to implement a cache decorator that uses the function arguments to build + the cache key based on the values of the arguments. + + :param func: the function the arguments should be bound for. + :param args: tuple of positional arguments. + :param kwargs: a dict of keyword arguments. + :return: a :class:`dict` of bound keyword arguments. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Use :meth:`Signature.bind` + instead. + """ + warnings.warn( + "'utils.bind_arguments' is deprecated and will be removed in" + " Werkzeug 2.1. Use 'Signature.bind' instead.", + DeprecationWarning, + stacklevel=2, + ) + ( + args, + kwargs, + missing, + extra, + extra_positional, + arg_spec, + vararg_var, + kwarg_var, + ) = _parse_signature(func)(args, kwargs) + values = {} + for (name, _has_default, _default), value in zip(arg_spec, args): + values[name] = value + if vararg_var is not None: + values[vararg_var] = tuple(extra_positional) + elif extra_positional: + raise TypeError("too many positional arguments") + if kwarg_var is not None: + multikw = set(extra) & {x[0] for x in arg_spec} + if multikw: + raise TypeError( + f"got multiple values for keyword argument {next(iter(multikw))!r}" + ) + values[kwarg_var] = extra + elif extra: + raise TypeError(f"got unexpected keyword argument {next(iter(extra))!r}") + return values + + +class ArgumentValidationError(ValueError): + """Raised if :func:`validate_arguments` fails to validate + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1 along with ``utils.bind`` and + ``validate_arguments``. + """ + + def __init__(self, missing=None, extra=None, extra_positional=None): # type: ignore + self.missing = set(missing or ()) + self.extra = extra or {} + self.extra_positional = extra_positional or [] + super().__init__( + "function arguments invalid." + f" ({len(self.missing)} missing," + f" {len(self.extra) + len(self.extra_positional)} additional)" + ) + + +class ImportStringError(ImportError): + """Provides information about a failed :func:`import_string` attempt.""" + + #: String in dotted notation that failed to be imported. + import_name: str + #: Wrapped exception. 
+ exception: BaseException + + def __init__(self, import_name: str, exception: BaseException) -> None: + self.import_name = import_name + self.exception = exception + msg = import_name + name = "" + tracked = [] + for part in import_name.replace(":", ".").split("."): + name = f"{name}.{part}" if name else part + imported = import_string(name, silent=True) + if imported: + tracked.append((name, getattr(imported, "__file__", None))) + else: + track = [f"- {n!r} found in {i!r}." for n, i in tracked] + track.append(f"- {name!r} not found.") + track_str = "\n".join(track) + msg = ( + f"import_string() failed for {import_name!r}. Possible reasons" + f" are:\n\n" + "- missing __init__.py in a package;\n" + "- package or module path not included in sys.path;\n" + "- duplicated package or module name taking precedence in" + " sys.path;\n" + "- missing module, class, function or variable;\n\n" + f"Debugged import:\n\n{track_str}\n\n" + f"Original exception:\n\n{type(exception).__name__}: {exception}" + ) + break + + super().__init__(msg) + + def __repr__(self) -> str: + return f"<{type(self).__name__}({self.import_name!r}, {self.exception!r})>" diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/__init__.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/__init__.py new file mode 100644 index 0000000..eb69a99 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/__init__.py @@ -0,0 +1,16 @@ +from .accept import AcceptMixin +from .auth import AuthorizationMixin +from .auth import WWWAuthenticateMixin +from .base_request import BaseRequest +from .base_response import BaseResponse +from .common_descriptors import CommonRequestDescriptorsMixin +from .common_descriptors import CommonResponseDescriptorsMixin +from .etag import ETagRequestMixin +from .etag import ETagResponseMixin +from .request import PlainRequest +from .request import Request as Request +from .request import StreamOnlyMixin +from .response import Response as Response +from .response import ResponseStream +from .response import ResponseStreamMixin +from .user_agent import UserAgentMixin diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/accept.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/accept.py new file mode 100644 index 0000000..9605e63 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/accept.py @@ -0,0 +1,14 @@ +import typing as t +import warnings + + +class AcceptMixin: + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'AcceptMixin' is deprecated and will be removed in" + " Werkzeug 2.1. 'Request' now includes the functionality" + " directly.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/auth.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/auth.py new file mode 100644 index 0000000..da31b7c --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/auth.py @@ -0,0 +1,26 @@ +import typing as t +import warnings + + +class AuthorizationMixin: + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'AuthorizationMixin' is deprecated and will be removed in" + " Werkzeug 2.1. 
'Request' now includes the functionality" + " directly.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) # type: ignore + + +class WWWAuthenticateMixin: + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'WWWAuthenticateMixin' is deprecated and will be removed" + " in Werkzeug 2.1. 'Response' now includes the" + " functionality directly.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/base_request.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/base_request.py new file mode 100644 index 0000000..451989f --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/base_request.py @@ -0,0 +1,36 @@ +import typing as t +import warnings + +from .request import Request + + +class _FakeSubclassCheck(type): + def __subclasscheck__(cls, subclass: t.Type) -> bool: + warnings.warn( + "'BaseRequest' is deprecated and will be removed in" + " Werkzeug 2.1. Use 'issubclass(cls, Request)' instead.", + DeprecationWarning, + stacklevel=2, + ) + return issubclass(subclass, Request) + + def __instancecheck__(cls, instance: t.Any) -> bool: + warnings.warn( + "'BaseRequest' is deprecated and will be removed in" + " Werkzeug 2.1. Use 'isinstance(obj, Request)' instead.", + DeprecationWarning, + stacklevel=2, + ) + return isinstance(instance, Request) + + +class BaseRequest(Request, metaclass=_FakeSubclassCheck): + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'BaseRequest' is deprecated and will be removed in" + " Werkzeug 2.1. 'Request' now includes the functionality" + " directly.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/base_response.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/base_response.py new file mode 100644 index 0000000..3e0dc67 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/base_response.py @@ -0,0 +1,36 @@ +import typing as t +import warnings + +from .response import Response + + +class _FakeSubclassCheck(type): + def __subclasscheck__(cls, subclass: t.Type) -> bool: + warnings.warn( + "'BaseResponse' is deprecated and will be removed in" + " Werkzeug 2.1. Use 'issubclass(cls, Response)' instead.", + DeprecationWarning, + stacklevel=2, + ) + return issubclass(subclass, Response) + + def __instancecheck__(cls, instance: t.Any) -> bool: + warnings.warn( + "'BaseResponse' is deprecated and will be removed in" + " Werkzeug 2.1. Use 'isinstance(obj, Response)' instead.", + DeprecationWarning, + stacklevel=2, + ) + return isinstance(instance, Response) + + +class BaseResponse(Response, metaclass=_FakeSubclassCheck): + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'BaseResponse' is deprecated and will be removed in" + " Werkzeug 2.1. 
'Response' now includes the functionality" + " directly.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/common_descriptors.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/common_descriptors.py new file mode 100644 index 0000000..db87ea5 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/common_descriptors.py @@ -0,0 +1,26 @@ +import typing as t +import warnings + + +class CommonRequestDescriptorsMixin: + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'CommonRequestDescriptorsMixin' is deprecated and will be" + " removed in Werkzeug 2.1. 'Request' now includes the" + " functionality directly.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) # type: ignore + + +class CommonResponseDescriptorsMixin: + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'CommonResponseDescriptorsMixin' is deprecated and will be" + " removed in Werkzeug 2.1. 'Response' now includes the" + " functionality directly.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/cors.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/cors.py new file mode 100644 index 0000000..89cf83e --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/cors.py @@ -0,0 +1,26 @@ +import typing as t +import warnings + + +class CORSRequestMixin: + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'CORSRequestMixin' is deprecated and will be removed in" + " Werkzeug 2.1. 'Request' now includes the functionality" + " directly.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) # type: ignore + + +class CORSResponseMixin: + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'CORSResponseMixin' is deprecated and will be removed in" + " Werkzeug 2.1. 'Response' now includes the functionality" + " directly.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/etag.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/etag.py new file mode 100644 index 0000000..2e9015a --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/etag.py @@ -0,0 +1,26 @@ +import typing as t +import warnings + + +class ETagRequestMixin: + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'ETagRequestMixin' is deprecated and will be removed in" + " Werkzeug 2.1. 'Request' now includes the functionality" + " directly.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) # type: ignore + + +class ETagResponseMixin: + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'ETagResponseMixin' is deprecated and will be removed in" + " Werkzeug 2.1. 
'Response' now includes the functionality" + " directly.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/json.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/json.py new file mode 100644 index 0000000..ab6ed7b --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/json.py @@ -0,0 +1,13 @@ +import typing as t +import warnings + + +class JSONMixin: + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'JSONMixin' is deprecated and will be removed in Werkzeug" + " 2.1. 'Request' now includes the functionality directly.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/request.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/request.py new file mode 100644 index 0000000..700cda0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/request.py @@ -0,0 +1,660 @@ +import functools +import json +import typing +import typing as t +import warnings +from io import BytesIO + +from .._internal import _wsgi_decoding_dance +from ..datastructures import CombinedMultiDict +from ..datastructures import EnvironHeaders +from ..datastructures import FileStorage +from ..datastructures import ImmutableMultiDict +from ..datastructures import iter_multi_items +from ..datastructures import MultiDict +from ..formparser import default_stream_factory +from ..formparser import FormDataParser +from ..sansio.request import Request as _SansIORequest +from ..utils import cached_property +from ..utils import environ_property +from ..wsgi import _get_server +from ..wsgi import get_input_stream +from werkzeug.exceptions import BadRequest + +if t.TYPE_CHECKING: + import typing_extensions as te + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class Request(_SansIORequest): + """Represents an incoming WSGI HTTP request, with headers and body + taken from the WSGI environment. Has properties and methods for + using the functionality defined by various HTTP specs. The data in + requests object is read-only. + + Text data is assumed to use UTF-8 encoding, which should be true for + the vast majority of modern clients. Using an encoding set by the + client is unsafe in Python due to extra encodings it provides, such + as ``zip``. To change the assumed encoding, subclass and replace + :attr:`charset`. + + :param environ: The WSGI environ is generated by the WSGI server and + contains information about the server configuration and client + request. + :param populate_request: Add this request object to the WSGI environ + as ``environ['werkzeug.request']``. Can be useful when + debugging. + :param shallow: Makes reading from :attr:`stream` (and any method + that would read from it) raise a :exc:`RuntimeError`. Useful to + prevent consuming the form data in middleware, which would make + it unavailable to the final application. + + .. versionchanged:: 2.0 + Combine ``BaseRequest`` and mixins into a single ``Request`` + class. Using the old classes is deprecated and will be removed + in Werkzeug 2.1. + + .. versionchanged:: 0.5 + Read-only mode is enforced with immutable classes for all data. + """ + + #: the maximum content length. This is forwarded to the form data + #: parsing function (:func:`parse_form_data`). 
When set and the + #: :attr:`form` or :attr:`files` attribute is accessed and the + #: parsing fails because more than the specified value is transmitted + #: a :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised. + #: + #: Have a look at :doc:`/request_data` for more details. + #: + #: .. versionadded:: 0.5 + max_content_length: t.Optional[int] = None + + #: the maximum form field size. This is forwarded to the form data + #: parsing function (:func:`parse_form_data`). When set and the + #: :attr:`form` or :attr:`files` attribute is accessed and the + #: data in memory for post data is longer than the specified value a + #: :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised. + #: + #: Have a look at :doc:`/request_data` for more details. + #: + #: .. versionadded:: 0.5 + max_form_memory_size: t.Optional[int] = None + + #: The form data parser that shoud be used. Can be replaced to customize + #: the form date parsing. + form_data_parser_class: t.Type[FormDataParser] = FormDataParser + + #: Disable the :attr:`data` property to avoid reading from the input + #: stream. + #: + #: .. deprecated:: 2.0 + #: Will be removed in Werkzeug 2.1. Create the request with + #: ``shallow=True`` instead. + #: + #: .. versionadded:: 0.9 + disable_data_descriptor: t.Optional[bool] = None + + #: The WSGI environment containing HTTP headers and information from + #: the WSGI server. + environ: "WSGIEnvironment" + + #: Set when creating the request object. If ``True``, reading from + #: the request body will cause a ``RuntimeException``. Useful to + #: prevent modifying the stream from middleware. + shallow: bool + + def __init__( + self, + environ: "WSGIEnvironment", + populate_request: bool = True, + shallow: bool = False, + ) -> None: + super().__init__( + method=environ.get("REQUEST_METHOD", "GET"), + scheme=environ.get("wsgi.url_scheme", "http"), + server=_get_server(environ), + root_path=_wsgi_decoding_dance( + environ.get("SCRIPT_NAME") or "", self.charset, self.encoding_errors + ), + path=_wsgi_decoding_dance( + environ.get("PATH_INFO") or "", self.charset, self.encoding_errors + ), + query_string=environ.get("QUERY_STRING", "").encode("latin1"), + headers=EnvironHeaders(environ), + remote_addr=environ.get("REMOTE_ADDR"), + ) + self.environ = environ + + if self.disable_data_descriptor is not None: + warnings.warn( + "'disable_data_descriptor' is deprecated and will be" + " removed in Werkzeug 2.1. Create the request with" + " 'shallow=True' instead.", + DeprecationWarning, + stacklevel=2, + ) + shallow = shallow or self.disable_data_descriptor + + self.shallow = shallow + + if populate_request and not shallow: + self.environ["werkzeug.request"] = self + + @classmethod + def from_values(cls, *args: t.Any, **kwargs: t.Any) -> "Request": + """Create a new request object based on the values provided. If + environ is given missing values are filled from there. This method is + useful for small scripts when you need to simulate a request from an URL. + Do not use this method for unittesting, there is a full featured client + object (:class:`Client`) that allows to create multipart requests, + support for cookies etc. + + This accepts the same options as the + :class:`~werkzeug.test.EnvironBuilder`. + + .. versionchanged:: 0.5 + This method now accepts the same arguments as + :class:`~werkzeug.test.EnvironBuilder`. Because of this the + `environ` parameter is now called `environ_overrides`. 
+ + :return: request object + """ + from ..test import EnvironBuilder + + charset = kwargs.pop("charset", cls.charset) + kwargs["charset"] = charset + builder = EnvironBuilder(*args, **kwargs) + try: + return builder.get_request(cls) + finally: + builder.close() + + @classmethod + def application( + cls, f: t.Callable[["Request"], "WSGIApplication"] + ) -> "WSGIApplication": + """Decorate a function as responder that accepts the request as + the last argument. This works like the :func:`responder` + decorator but the function is passed the request object as the + last argument and the request object will be closed + automatically:: + + @Request.application + def my_wsgi_app(request): + return Response('Hello World!') + + As of Werkzeug 0.14 HTTP exceptions are automatically caught and + converted to responses instead of failing. + + :param f: the WSGI callable to decorate + :return: a new WSGI callable + """ + #: return a callable that wraps the -2nd argument with the request + #: and calls the function with all the arguments up to that one and + #: the request. The return value is then called with the latest + #: two arguments. This makes it possible to use this decorator for + #: both standalone WSGI functions as well as bound methods and + #: partially applied functions. + from ..exceptions import HTTPException + + @functools.wraps(f) + def application(*args): # type: ignore + request = cls(args[-2]) + with request: + try: + resp = f(*args[:-2] + (request,)) + except HTTPException as e: + resp = e.get_response(args[-2]) + return resp(*args[-2:]) + + return t.cast("WSGIApplication", application) + + def _get_file_stream( + self, + total_content_length: t.Optional[int], + content_type: t.Optional[str], + filename: t.Optional[str] = None, + content_length: t.Optional[int] = None, + ) -> t.IO[bytes]: + """Called to get a stream for the file upload. + + This must provide a file-like class with `read()`, `readline()` + and `seek()` methods that is both writeable and readable. + + The default implementation returns a temporary file if the total + content length is higher than 500KB. Because many browsers do not + provide a content length for the files only the total content + length matters. + + :param total_content_length: the total content length of all the + data in the request combined. This value + is guaranteed to be there. + :param content_type: the mimetype of the uploaded file. + :param filename: the filename of the uploaded file. May be `None`. + :param content_length: the length of this file. This value is usually + not provided because webbrowsers do not provide + this value. + """ + return default_stream_factory( + total_content_length=total_content_length, + filename=filename, + content_type=content_type, + content_length=content_length, + ) + + @property + def want_form_data_parsed(self) -> bool: + """``True`` if the request method carries content. By default + this is true if a ``Content-Type`` is sent. + + .. versionadded:: 0.8 + """ + return bool(self.environ.get("CONTENT_TYPE")) + + def make_form_data_parser(self) -> FormDataParser: + """Creates the form data parser. Instantiates the + :attr:`form_data_parser_class` with some parameters. + + .. versionadded:: 0.8 + """ + return self.form_data_parser_class( + self._get_file_stream, + self.charset, + self.encoding_errors, + self.max_form_memory_size, + self.max_content_length, + self.parameter_storage_class, + ) + + def _load_form_data(self) -> None: + """Method used internally to retrieve submitted data. 
After calling + this sets `form` and `files` on the request object to multi dicts + filled with the incoming form data. As a matter of fact the input + stream will be empty afterwards. You can also call this method to + force the parsing of the form data. + + .. versionadded:: 0.8 + """ + # abort early if we have already consumed the stream + if "form" in self.__dict__: + return + + if self.want_form_data_parsed: + parser = self.make_form_data_parser() + data = parser.parse( + self._get_stream_for_parsing(), + self.mimetype, + self.content_length, + self.mimetype_params, + ) + else: + data = ( + self.stream, + self.parameter_storage_class(), + self.parameter_storage_class(), + ) + + # inject the values into the instance dict so that we bypass + # our cached_property non-data descriptor. + d = self.__dict__ + d["stream"], d["form"], d["files"] = data + + def _get_stream_for_parsing(self) -> t.IO[bytes]: + """This is the same as accessing :attr:`stream` with the difference + that if it finds cached data from calling :meth:`get_data` first it + will create a new stream out of the cached data. + + .. versionadded:: 0.9.3 + """ + cached_data = getattr(self, "_cached_data", None) + if cached_data is not None: + return BytesIO(cached_data) + return self.stream + + def close(self) -> None: + """Closes associated resources of this request object. This + closes all file handles explicitly. You can also use the request + object in a with statement which will automatically close it. + + .. versionadded:: 0.9 + """ + files = self.__dict__.get("files") + for _key, value in iter_multi_items(files or ()): + value.close() + + def __enter__(self) -> "Request": + return self + + def __exit__(self, exc_type, exc_value, tb) -> None: # type: ignore + self.close() + + @cached_property + def stream(self) -> t.IO[bytes]: + """ + If the incoming form data was not encoded with a known mimetype + the data is stored unmodified in this stream for consumption. Most + of the time it is a better idea to use :attr:`data` which will give + you that data as a string. The stream only returns the data once. + + Unlike :attr:`input_stream` this stream is properly guarded that you + can't accidentally read past the length of the input. Werkzeug will + internally always refer to this stream to read data which makes it + possible to wrap this object with a stream that does filtering. + + .. versionchanged:: 0.9 + This stream is now always available but might be consumed by the + form parser later on. Previously the stream was only set if no + parsing happened. + """ + if self.shallow: + raise RuntimeError( + "This request was created with 'shallow=True', reading" + " from the input stream is disabled." + ) + + return get_input_stream(self.environ) + + input_stream = environ_property[t.IO[bytes]]( + "wsgi.input", + doc="""The WSGI input stream. + + In general it's a bad idea to use this one because you can + easily read past the boundary. Use the :attr:`stream` + instead.""", + ) + + @cached_property + def data(self) -> bytes: + """ + Contains the incoming request data as string in case it came with + a mimetype Werkzeug does not handle. + """ + return self.get_data(parse_form_data=True) + + @typing.overload + def get_data( # type: ignore + self, + cache: bool = True, + as_text: "te.Literal[False]" = False, + parse_form_data: bool = False, + ) -> bytes: + ... + + @typing.overload + def get_data( + self, + cache: bool = True, + as_text: "te.Literal[True]" = ..., + parse_form_data: bool = False, + ) -> str: + ... 
+ + def get_data( + self, cache: bool = True, as_text: bool = False, parse_form_data: bool = False + ) -> t.Union[bytes, str]: + """This reads the buffered incoming data from the client into one + bytes object. By default this is cached but that behavior can be + changed by setting `cache` to `False`. + + Usually it's a bad idea to call this method without checking the + content length first as a client could send dozens of megabytes or more + to cause memory problems on the server. + + Note that if the form data was already parsed this method will not + return anything as form data parsing does not cache the data like + this method does. To implicitly invoke form data parsing function + set `parse_form_data` to `True`. When this is done the return value + of this method will be an empty string if the form parser handles + the data. This generally is not necessary as if the whole data is + cached (which is the default) the form parser will used the cached + data to parse the form data. Please be generally aware of checking + the content length first in any case before calling this method + to avoid exhausting server memory. + + If `as_text` is set to `True` the return value will be a decoded + string. + + .. versionadded:: 0.9 + """ + rv = getattr(self, "_cached_data", None) + if rv is None: + if parse_form_data: + self._load_form_data() + rv = self.stream.read() + if cache: + self._cached_data = rv + if as_text: + rv = rv.decode(self.charset, self.encoding_errors) + return rv # type: ignore + + @cached_property + def form(self) -> "ImmutableMultiDict[str, str]": + """The form parameters. By default an + :class:`~werkzeug.datastructures.ImmutableMultiDict` + is returned from this function. This can be changed by setting + :attr:`parameter_storage_class` to a different type. This might + be necessary if the order of the form data is important. + + Please keep in mind that file uploads will not end up here, but instead + in the :attr:`files` attribute. + + .. versionchanged:: 0.9 + + Previous to Werkzeug 0.9 this would only contain form data for POST + and PUT requests. + """ + self._load_form_data() + return self.form + + @cached_property + def values(self) -> "CombinedMultiDict[str, str]": + """A :class:`werkzeug.datastructures.CombinedMultiDict` that + combines :attr:`args` and :attr:`form`. + + For GET requests, only ``args`` are present, not ``form``. + + .. versionchanged:: 2.0 + For GET requests, only ``args`` are present, not ``form``. + """ + sources = [self.args] + + if self.method != "GET": + # GET requests can have a body, and some caching proxies + # might not treat that differently than a normal GET + # request, allowing form data to "invisibly" affect the + # cache without indication in the query string / URL. + sources.append(self.form) + + args = [] + + for d in sources: + if not isinstance(d, MultiDict): + d = MultiDict(d) + + args.append(d) + + return CombinedMultiDict(args) + + @cached_property + def files(self) -> "ImmutableMultiDict[str, FileStorage]": + """:class:`~werkzeug.datastructures.MultiDict` object containing + all uploaded files. Each key in :attr:`files` is the name from the + ````. Each value in :attr:`files` is a + Werkzeug :class:`~werkzeug.datastructures.FileStorage` object. + + It basically behaves like a standard file object you know from Python, + with the difference that it also has a + :meth:`~werkzeug.datastructures.FileStorage.save` function that can + store the file on the filesystem. 
+ + Note that :attr:`files` will only contain data if the request method was + POST, PUT or PATCH and the ``<form>
    `` that posted to the request had + ``enctype="multipart/form-data"``. It will be empty otherwise. + + See the :class:`~werkzeug.datastructures.MultiDict` / + :class:`~werkzeug.datastructures.FileStorage` documentation for + more details about the used data structure. + """ + self._load_form_data() + return self.files + + @property + def script_root(self) -> str: + """Alias for :attr:`self.root_path`. ``environ["SCRIPT_ROOT"]`` + without a trailing slash. + """ + return self.root_path + + @cached_property + def url_root(self) -> str: + """Alias for :attr:`root_url`. The URL with scheme, host, and + root path. For example, ``https://example.com/app/``. + """ + return self.root_url + + remote_user = environ_property[str]( + "REMOTE_USER", + doc="""If the server supports user authentication, and the + script is protected, this attribute contains the username the + user has authenticated as.""", + ) + is_multithread = environ_property[bool]( + "wsgi.multithread", + doc="""boolean that is `True` if the application is served by a + multithreaded WSGI server.""", + ) + is_multiprocess = environ_property[bool]( + "wsgi.multiprocess", + doc="""boolean that is `True` if the application is served by a + WSGI server that spawns multiple processes.""", + ) + is_run_once = environ_property[bool]( + "wsgi.run_once", + doc="""boolean that is `True` if the application will be + executed only once in a process lifetime. This is the case for + CGI for example, but it's not guaranteed that the execution only + happens one time.""", + ) + + # JSON + + #: A module or other object that has ``dumps`` and ``loads`` + #: functions that match the API of the built-in :mod:`json` module. + json_module = json + + @property + def json(self) -> t.Optional[t.Any]: + """The parsed JSON data if :attr:`mimetype` indicates JSON + (:mimetype:`application/json`, see :attr:`is_json`). + + Calls :meth:`get_json` with default arguments. + """ + return self.get_json() + + # Cached values for ``(silent=False, silent=True)``. Initialized + # with sentinel values. + _cached_json: t.Tuple[t.Any, t.Any] = (Ellipsis, Ellipsis) + + def get_json( + self, force: bool = False, silent: bool = False, cache: bool = True + ) -> t.Optional[t.Any]: + """Parse :attr:`data` as JSON. + + If the mimetype does not indicate JSON + (:mimetype:`application/json`, see :attr:`is_json`), this + returns ``None``. + + If parsing fails, :meth:`on_json_loading_failed` is called and + its return value is used as the return value. + + :param force: Ignore the mimetype and always try to parse JSON. + :param silent: Silence parsing errors and return ``None`` + instead. + :param cache: Store the parsed JSON to return for subsequent + calls. + """ + if cache and self._cached_json[silent] is not Ellipsis: + return self._cached_json[silent] + + if not (force or self.is_json): + return None + + data = self.get_data(cache=cache) + + try: + rv = self.json_module.loads(data) + except ValueError as e: + if silent: + rv = None + + if cache: + normal_rv, _ = self._cached_json + self._cached_json = (normal_rv, rv) + else: + rv = self.on_json_loading_failed(e) + + if cache: + _, silent_rv = self._cached_json + self._cached_json = (rv, silent_rv) + else: + if cache: + self._cached_json = (rv, rv) + + return rv + + def on_json_loading_failed(self, e: ValueError) -> t.Any: + """Called if :meth:`get_json` parsing fails and isn't silenced. + If this method returns a value, it is used as the return value + for :meth:`get_json`. 
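A short sketch of the JSON helpers described above (``is_json``, ``get_json``, and the ``silent`` flag), assuming the vendored Werkzeug 2.0.x; the variable names are illustrative.

from werkzeug.test import EnvironBuilder
from werkzeug.wrappers import Request

# EnvironBuilder's json= argument serializes the body and sets application/json
request = Request(EnvironBuilder(method="POST", json={"id": 1}).get_environ())
assert request.is_json
assert request.get_json() == {"id": 1}

# malformed JSON: silent=True returns None instead of raising BadRequest
broken = EnvironBuilder(method="POST", data="not json", content_type="application/json")
assert Request(broken.get_environ()).get_json(silent=True) is None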
The default implementation raises + :exc:`~werkzeug.exceptions.BadRequest`. + """ + raise BadRequest(f"Failed to decode JSON object: {e}") + + +class StreamOnlyMixin: + """Mixin to create a ``Request`` that disables the ``data``, + ``form``, and ``files`` properties. Only ``stream`` is available. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Create the request with + ``shallow=True`` instead. + + .. versionadded:: 0.9 + """ + + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'StreamOnlyMixin' is deprecated and will be removed in" + " Werkzeug 2.1. Create the request with 'shallow=True'" + " instead.", + DeprecationWarning, + stacklevel=2, + ) + kwargs["shallow"] = True + super().__init__(*args, **kwargs) # type: ignore + + +class PlainRequest(StreamOnlyMixin, Request): + """A request object without ``data``, ``form``, and ``files``. + + .. deprecated:: 2.0 + Will be removed in Werkzeug 2.1. Create the request with + ``shallow=True`` instead. + + .. versionadded:: 0.9 + """ + + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'PlainRequest' is deprecated and will be removed in" + " Werkzeug 2.1. Create the request with 'shallow=True'" + " instead.", + DeprecationWarning, + stacklevel=2, + ) + + # Don't show the DeprecationWarning for StreamOnlyMixin. + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + super().__init__(*args, **kwargs) diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/response.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/response.py new file mode 100644 index 0000000..d365c4e --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/response.py @@ -0,0 +1,890 @@ +import json +import typing +import typing as t +import warnings +from http import HTTPStatus + +from .._internal import _to_bytes +from ..datastructures import Headers +from ..http import remove_entity_headers +from ..sansio.response import Response as _SansIOResponse +from ..urls import iri_to_uri +from ..urls import url_join +from ..utils import cached_property +from ..wsgi import ClosingIterator +from ..wsgi import get_current_url +from werkzeug._internal import _get_environ +from werkzeug.http import generate_etag +from werkzeug.http import http_date +from werkzeug.http import is_resource_modified +from werkzeug.http import parse_etags +from werkzeug.http import parse_range_header +from werkzeug.wsgi import _RangeWrapper + +if t.TYPE_CHECKING: + import typing_extensions as te + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +def _warn_if_string(iterable: t.Iterable) -> None: + """Helper for the response objects to check if the iterable returned + to the WSGI server is not a string. + """ + if isinstance(iterable, str): + warnings.warn( + "Response iterable was set to a string. This will appear to" + " work but means that the server will send the data to the" + " client one character at a time. 
This is almost never" + " intended behavior, use 'response.data' to assign strings" + " to the response object.", + stacklevel=2, + ) + + +def _iter_encoded( + iterable: t.Iterable[t.Union[str, bytes]], charset: str +) -> t.Iterator[bytes]: + for item in iterable: + if isinstance(item, str): + yield item.encode(charset) + else: + yield item + + +def _clean_accept_ranges(accept_ranges: t.Union[bool, str]) -> str: + if accept_ranges is True: + return "bytes" + elif accept_ranges is False: + return "none" + elif isinstance(accept_ranges, str): + return accept_ranges + raise ValueError("Invalid accept_ranges value") + + +class Response(_SansIOResponse): + """Represents an outgoing WSGI HTTP response with body, status, and + headers. Has properties and methods for using the functionality + defined by various HTTP specs. + + The response body is flexible to support different use cases. The + simple form is passing bytes, or a string which will be encoded as + UTF-8. Passing an iterable of bytes or strings makes this a + streaming response. A generator is particularly useful for building + a CSV file in memory or using SSE (Server Sent Events). A file-like + object is also iterable, although the + :func:`~werkzeug.utils.send_file` helper should be used in that + case. + + The response object is itself a WSGI application callable. When + called (:meth:`__call__`) with ``environ`` and ``start_response``, + it will pass its status and headers to ``start_response`` then + return its body as an iterable. + + .. code-block:: python + + from werkzeug.wrappers.response import Response + + def index(): + return Response("Hello, World!") + + def application(environ, start_response): + path = environ.get("PATH_INFO") or "/" + + if path == "/": + response = index() + else: + response = Response("Not Found", status=404) + + return response(environ, start_response) + + :param response: The data for the body of the response. A string or + bytes, or tuple or list of strings or bytes, for a fixed-length + response, or any other iterable of strings or bytes for a + streaming response. Defaults to an empty body. + :param status: The status code for the response. Either an int, in + which case the default status message is added, or a string in + the form ``{code} {message}``, like ``404 Not Found``. Defaults + to 200. + :param headers: A :class:`~werkzeug.datastructures.Headers` object, + or a list of ``(key, value)`` tuples that will be converted to a + ``Headers`` object. + :param mimetype: The mime type (content type without charset or + other parameters) of the response. If the value starts with + ``text/`` (or matches some other special cases), the charset + will be added to create the ``content_type``. + :param content_type: The full content type of the response. + Overrides building the value from ``mimetype``. + :param direct_passthrough: Pass the response body directly through + as the WSGI iterable. This can be used when the body is a binary + file or other iterator of bytes, to skip some unnecessary + checks. Use :func:`~werkzeug.utils.send_file` instead of setting + this manually. + + .. versionchanged:: 2.0 + Combine ``BaseResponse`` and mixins into a single ``Response`` + class. Using the old classes is deprecated and will be removed + in Werkzeug 2.1. + + .. versionchanged:: 0.5 + The ``direct_passthrough`` parameter was added. + """ + + #: if set to `False` accessing properties on the response object will + #: not try to consume the response iterator and convert it into a list. + #: + #: .. 
versionadded:: 0.6.2 + #: + #: That attribute was previously called `implicit_seqence_conversion`. + #: (Notice the typo). If you did use this feature, you have to adapt + #: your code to the name change. + implicit_sequence_conversion = True + + #: Should this response object correct the location header to be RFC + #: conformant? This is true by default. + #: + #: .. versionadded:: 0.8 + autocorrect_location_header = True + + #: Should this response object automatically set the content-length + #: header if possible? This is true by default. + #: + #: .. versionadded:: 0.8 + automatically_set_content_length = True + + #: The response body to send as the WSGI iterable. A list of strings + #: or bytes represents a fixed-length response, any other iterable + #: is a streaming response. Strings are encoded to bytes as UTF-8. + #: + #: Do not set to a plain string or bytes, that will cause sending + #: the response to be very inefficient as it will iterate one byte + #: at a time. + response: t.Union[t.Iterable[str], t.Iterable[bytes]] + + def __init__( + self, + response: t.Optional[ + t.Union[t.Iterable[bytes], bytes, t.Iterable[str], str] + ] = None, + status: t.Optional[t.Union[int, str, HTTPStatus]] = None, + headers: t.Optional[ + t.Union[ + t.Mapping[str, t.Union[str, int, t.Iterable[t.Union[str, int]]]], + t.Iterable[t.Tuple[str, t.Union[str, int]]], + ] + ] = None, + mimetype: t.Optional[str] = None, + content_type: t.Optional[str] = None, + direct_passthrough: bool = False, + ) -> None: + super().__init__( + status=status, + headers=headers, + mimetype=mimetype, + content_type=content_type, + ) + + #: Pass the response body directly through as the WSGI iterable. + #: This can be used when the body is a binary file or other + #: iterator of bytes, to skip some unnecessary checks. Use + #: :func:`~werkzeug.utils.send_file` instead of setting this + #: manually. + self.direct_passthrough = direct_passthrough + self._on_close: t.List[t.Callable[[], t.Any]] = [] + + # we set the response after the headers so that if a class changes + # the charset attribute, the data is set in the correct charset. + if response is None: + self.response = [] + elif isinstance(response, (str, bytes, bytearray)): + self.set_data(response) + else: + self.response = response + + def call_on_close(self, func: t.Callable[[], t.Any]) -> t.Callable[[], t.Any]: + """Adds a function to the internal list of functions that should + be called as part of closing down the response. Since 0.7 this + function also returns the function that was passed so that this + can be used as a decorator. + + .. versionadded:: 0.6 + """ + self._on_close.append(func) + return func + + def __repr__(self) -> str: + if self.is_sequence: + body_info = f"{sum(map(len, self.iter_encoded()))} bytes" + else: + body_info = "streamed" if self.is_streamed else "likely-streamed" + return f"<{type(self).__name__} {body_info} [{self.status}]>" + + @classmethod + def force_type( + cls, response: "Response", environ: t.Optional["WSGIEnvironment"] = None + ) -> "Response": + """Enforce that the WSGI response is a response object of the current + type. Werkzeug will use the :class:`Response` internally in many + situations like the exceptions. If you call :meth:`get_response` on an + exception you will get back a regular :class:`Response` object, even + if you are using a custom subclass. 
+ + This method can enforce a given response type, and it will also + convert arbitrary WSGI callables into response objects if an environ + is provided:: + + # convert a Werkzeug response object into an instance of the + # MyResponseClass subclass. + response = MyResponseClass.force_type(response) + + # convert any WSGI application into a response object + response = MyResponseClass.force_type(response, environ) + + This is especially useful if you want to post-process responses in + the main dispatcher and use functionality provided by your subclass. + + Keep in mind that this will modify response objects in place if + possible! + + :param response: a response object or wsgi application. + :param environ: a WSGI environment object. + :return: a response object. + """ + if not isinstance(response, Response): + if environ is None: + raise TypeError( + "cannot convert WSGI application into response" + " objects without an environ" + ) + + from ..test import run_wsgi_app + + response = Response(*run_wsgi_app(response, environ)) + + response.__class__ = cls + return response + + @classmethod + def from_app( + cls, app: "WSGIApplication", environ: "WSGIEnvironment", buffered: bool = False + ) -> "Response": + """Create a new response object from an application output. This + works best if you pass it an application that returns a generator all + the time. Sometimes applications may use the `write()` callable + returned by the `start_response` function. This tries to resolve such + edge cases automatically. But if you don't get the expected output + you should set `buffered` to `True` which enforces buffering. + + :param app: the WSGI application to execute. + :param environ: the WSGI environment to execute against. + :param buffered: set to `True` to enforce buffering. + :return: a response object. + """ + from ..test import run_wsgi_app + + return cls(*run_wsgi_app(app, environ, buffered)) + + @typing.overload + def get_data(self, as_text: "te.Literal[False]" = False) -> bytes: + ... + + @typing.overload + def get_data(self, as_text: "te.Literal[True]") -> str: + ... + + def get_data(self, as_text: bool = False) -> t.Union[bytes, str]: + """The string representation of the response body. Whenever you call + this property the response iterable is encoded and flattened. This + can lead to unwanted behavior if you stream big data. + + This behavior can be disabled by setting + :attr:`implicit_sequence_conversion` to `False`. + + If `as_text` is set to `True` the return value will be a decoded + string. + + .. versionadded:: 0.9 + """ + self._ensure_sequence() + rv = b"".join(self.iter_encoded()) + + if as_text: + return rv.decode(self.charset) + + return rv + + def set_data(self, value: t.Union[bytes, str]) -> None: + """Sets a new string as response. The value must be a string or + bytes. If a string is set it's encoded to the charset of the + response (utf-8 by default). + + .. 
versionadded:: 0.9 + """ + # if a string is set, it's encoded directly so that we + # can set the content length + if isinstance(value, str): + value = value.encode(self.charset) + else: + value = bytes(value) + self.response = [value] + if self.automatically_set_content_length: + self.headers["Content-Length"] = str(len(value)) + + data = property( + get_data, + set_data, + doc="A descriptor that calls :meth:`get_data` and :meth:`set_data`.", + ) + + def calculate_content_length(self) -> t.Optional[int]: + """Returns the content length if available or `None` otherwise.""" + try: + self._ensure_sequence() + except RuntimeError: + return None + return sum(len(x) for x in self.iter_encoded()) + + def _ensure_sequence(self, mutable: bool = False) -> None: + """This method can be called by methods that need a sequence. If + `mutable` is true, it will also ensure that the response sequence + is a standard Python list. + + .. versionadded:: 0.6 + """ + if self.is_sequence: + # if we need a mutable object, we ensure it's a list. + if mutable and not isinstance(self.response, list): + self.response = list(self.response) # type: ignore + return + if self.direct_passthrough: + raise RuntimeError( + "Attempted implicit sequence conversion but the" + " response object is in direct passthrough mode." + ) + if not self.implicit_sequence_conversion: + raise RuntimeError( + "The response object required the iterable to be a" + " sequence, but the implicit conversion was disabled." + " Call make_sequence() yourself." + ) + self.make_sequence() + + def make_sequence(self) -> None: + """Converts the response iterator in a list. By default this happens + automatically if required. If `implicit_sequence_conversion` is + disabled, this method is not automatically called and some properties + might raise exceptions. This also encodes all the items. + + .. versionadded:: 0.6 + """ + if not self.is_sequence: + # if we consume an iterable we have to ensure that the close + # method of the iterable is called if available when we tear + # down the response + close = getattr(self.response, "close", None) + self.response = list(self.iter_encoded()) + if close is not None: + self.call_on_close(close) + + def iter_encoded(self) -> t.Iterator[bytes]: + """Iter the response encoded with the encoding of the response. + If the response object is invoked as WSGI application the return + value of this method is used as application iterator unless + :attr:`direct_passthrough` was activated. + """ + if __debug__: + _warn_if_string(self.response) + # Encode in a separate function so that self.response is fetched + # early. This allows us to wrap the response with the return + # value from get_app_iter or iter_encoded. + return _iter_encoded(self.response, self.charset) + + @property + def is_streamed(self) -> bool: + """If the response is streamed (the response is not an iterable with + a length information) this property is `True`. In this case streamed + means that there is no information about the number of iterations. + This is usually `True` if a generator is passed to the response object. + + This is useful for checking before applying some sort of post + filtering that should not take place for streamed responses. + """ + try: + len(self.response) # type: ignore + except (TypeError, AttributeError): + return True + return False + + @property + def is_sequence(self) -> bool: + """If the iterator is buffered, this property will be `True`. 
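The difference between streamed and buffered bodies described above can be sketched as follows, assuming the vendored Werkzeug 2.0.x; ``generate``, ``streamed``, and ``buffered`` are illustrative names.

from werkzeug.wrappers import Response

def generate():
    yield "row 1\n"
    yield "row 2\n"

streamed = Response(generate(), mimetype="text/plain")
assert streamed.is_streamed and not streamed.is_sequence   # no length known up front

buffered = Response("hello")                               # stored as [b"hello"]
assert buffered.is_sequence
assert buffered.get_data(as_text=True) == "hello"          # flattens via iter_encoded()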
A + response object will consider an iterator to be buffered if the + response attribute is a list or tuple. + + .. versionadded:: 0.6 + """ + return isinstance(self.response, (tuple, list)) + + def close(self) -> None: + """Close the wrapped response if possible. You can also use the object + in a with statement which will automatically close it. + + .. versionadded:: 0.9 + Can now be used in a with statement. + """ + if hasattr(self.response, "close"): + self.response.close() # type: ignore + for func in self._on_close: + func() + + def __enter__(self) -> "Response": + return self + + def __exit__(self, exc_type, exc_value, tb): # type: ignore + self.close() + + def freeze(self, no_etag: None = None) -> None: + """Make the response object ready to be pickled. Does the + following: + + * Buffer the response into a list, ignoring + :attr:`implicity_sequence_conversion` and + :attr:`direct_passthrough`. + * Set the ``Content-Length`` header. + * Generate an ``ETag`` header if one is not already set. + + .. versionchanged:: 2.0 + An ``ETag`` header is added, the ``no_etag`` parameter is + deprecated and will be removed in Werkzeug 2.1. + + .. versionchanged:: 0.6 + The ``Content-Length`` header is set. + """ + # Always freeze the encoded response body, ignore + # implicit_sequence_conversion and direct_passthrough. + self.response = list(self.iter_encoded()) + self.headers["Content-Length"] = str(sum(map(len, self.response))) + + if no_etag is not None: + warnings.warn( + "The 'no_etag' parameter is deprecated and will be" + " removed in Werkzeug 2.1.", + DeprecationWarning, + stacklevel=2, + ) + + self.add_etag() + + def get_wsgi_headers(self, environ: "WSGIEnvironment") -> Headers: + """This is automatically called right before the response is started + and returns headers modified for the given environment. It returns a + copy of the headers from the response with some modifications applied + if necessary. + + For example the location header (if present) is joined with the root + URL of the environment. Also the content length is automatically set + to zero here for certain status codes. + + .. versionchanged:: 0.6 + Previously that function was called `fix_headers` and modified + the response object in place. Also since 0.6, IRIs in location + and content-location headers are handled properly. + + Also starting with 0.6, Werkzeug will attempt to set the content + length if it is able to figure it out on its own. This is the + case if all the strings in the response iterable are already + encoded and the iterable is buffered. + + :param environ: the WSGI environment of the request. + :return: returns a new :class:`~werkzeug.datastructures.Headers` + object. + """ + headers = Headers(self.headers) + location: t.Optional[str] = None + content_location: t.Optional[str] = None + content_length: t.Optional[t.Union[str, int]] = None + status = self.status_code + + # iterate over the headers to find all values in one go. Because + # get_wsgi_headers is used each response that gives us a tiny + # speedup. + for key, value in headers: + ikey = key.lower() + if ikey == "location": + location = value + elif ikey == "content-location": + content_location = value + elif ikey == "content-length": + content_length = value + + # make sure the location header is an absolute URL + if location is not None: + old_location = location + if isinstance(location, str): + # Safe conversion is necessary here as we might redirect + # to a broken URI scheme (for instance itms-services). 
+ location = iri_to_uri(location, safe_conversion=True) + + if self.autocorrect_location_header: + current_url = get_current_url(environ, strip_querystring=True) + if isinstance(current_url, str): + current_url = iri_to_uri(current_url) + location = url_join(current_url, location) + if location != old_location: + headers["Location"] = location + + # make sure the content location is a URL + if content_location is not None and isinstance(content_location, str): + headers["Content-Location"] = iri_to_uri(content_location) + + if 100 <= status < 200 or status == 204: + # Per section 3.3.2 of RFC 7230, "a server MUST NOT send a + # Content-Length header field in any response with a status + # code of 1xx (Informational) or 204 (No Content)." + headers.remove("Content-Length") + elif status == 304: + remove_entity_headers(headers) + + # if we can determine the content length automatically, we + # should try to do that. But only if this does not involve + # flattening the iterator or encoding of strings in the + # response. We however should not do that if we have a 304 + # response. + if ( + self.automatically_set_content_length + and self.is_sequence + and content_length is None + and status not in (204, 304) + and not (100 <= status < 200) + ): + try: + content_length = sum(len(_to_bytes(x, "ascii")) for x in self.response) + except UnicodeError: + # Something other than bytes, can't safely figure out + # the length of the response. + pass + else: + headers["Content-Length"] = str(content_length) + + return headers + + def get_app_iter(self, environ: "WSGIEnvironment") -> t.Iterable[bytes]: + """Returns the application iterator for the given environ. Depending + on the request method and the current status code the return value + might be an empty response rather than the one from the response. + + If the request method is `HEAD` or the status code is in a range + where the HTTP specification requires an empty response, an empty + iterable is returned. + + .. versionadded:: 0.6 + + :param environ: the WSGI environment of the request. + :return: a response iterable. + """ + status = self.status_code + if ( + environ["REQUEST_METHOD"] == "HEAD" + or 100 <= status < 200 + or status in (204, 304) + ): + iterable: t.Iterable[bytes] = () + elif self.direct_passthrough: + if __debug__: + _warn_if_string(self.response) + return self.response # type: ignore + else: + iterable = self.iter_encoded() + return ClosingIterator(iterable, self.close) + + def get_wsgi_response( + self, environ: "WSGIEnvironment" + ) -> t.Tuple[t.Iterable[bytes], str, t.List[t.Tuple[str, str]]]: + """Returns the final WSGI response as tuple. The first item in + the tuple is the application iterator, the second the status and + the third the list of headers. The response returned is created + specially for the given environment. For example if the request + method in the WSGI environment is ``'HEAD'`` the response will + be empty and only the headers and status code will be present. + + .. versionadded:: 0.6 + + :param environ: the WSGI environment of the request. + :return: an ``(app_iter, status, headers)`` tuple. + """ + headers = self.get_wsgi_headers(environ) + app_iter = self.get_app_iter(environ) + return app_iter, self.status, headers.to_wsgi_list() + + def __call__( + self, environ: "WSGIEnvironment", start_response: "StartResponse" + ) -> t.Iterable[bytes]: + """Process this response as WSGI application. + + :param environ: the WSGI environment. 
+ :param start_response: the response callable provided by the WSGI + server. + :return: an application iterator + """ + app_iter, status, headers = self.get_wsgi_response(environ) + start_response(status, headers) + return app_iter + + # JSON + + #: A module or other object that has ``dumps`` and ``loads`` + #: functions that match the API of the built-in :mod:`json` module. + json_module = json + + @property + def json(self) -> t.Optional[t.Any]: + """The parsed JSON data if :attr:`mimetype` indicates JSON + (:mimetype:`application/json`, see :attr:`is_json`). + + Calls :meth:`get_json` with default arguments. + """ + return self.get_json() + + def get_json(self, force: bool = False, silent: bool = False) -> t.Optional[t.Any]: + """Parse :attr:`data` as JSON. Useful during testing. + + If the mimetype does not indicate JSON + (:mimetype:`application/json`, see :attr:`is_json`), this + returns ``None``. + + Unlike :meth:`Request.get_json`, the result is not cached. + + :param force: Ignore the mimetype and always try to parse JSON. + :param silent: Silence parsing errors and return ``None`` + instead. + """ + if not (force or self.is_json): + return None + + data = self.get_data() + + try: + return self.json_module.loads(data) + except ValueError: + if not silent: + raise + + return None + + # Stream + + @cached_property + def stream(self) -> "ResponseStream": + """The response iterable as write-only stream.""" + return ResponseStream(self) + + def _wrap_range_response(self, start: int, length: int) -> None: + """Wrap existing Response in case of Range Request context.""" + if self.status_code == 206: + self.response = _RangeWrapper(self.response, start, length) # type: ignore + + def _is_range_request_processable(self, environ: "WSGIEnvironment") -> bool: + """Return ``True`` if `Range` header is present and if underlying + resource is considered unchanged when compared with `If-Range` header. + """ + return ( + "HTTP_IF_RANGE" not in environ + or not is_resource_modified( + environ, + self.headers.get("etag"), + None, + self.headers.get("last-modified"), + ignore_if_range=False, + ) + ) and "HTTP_RANGE" in environ + + def _process_range_request( + self, + environ: "WSGIEnvironment", + complete_length: t.Optional[int] = None, + accept_ranges: t.Optional[t.Union[bool, str]] = None, + ) -> bool: + """Handle Range Request related headers (RFC7233). If `Accept-Ranges` + header is valid, and Range Request is processable, we set the headers + as described by the RFC, and wrap the underlying response in a + RangeWrapper. + + Returns ``True`` if Range Request can be fulfilled, ``False`` otherwise. + + :raises: :class:`~werkzeug.exceptions.RequestedRangeNotSatisfiable` + if `Range` header could not be parsed or satisfied. + + .. versionchanged:: 2.0 + Returns ``False`` if the length is 0. 
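A small sketch tying together the WSGI callable behaviour and the ``get_json`` test helper defined above, assuming the vendored Werkzeug 2.0.x and its ``werkzeug.test`` utilities.

from werkzeug.test import create_environ, run_wsgi_app
from werkzeug.wrappers import Response

response = Response('{"ok": true}', mimetype="application/json")
assert response.get_json() == {"ok": True}        # parsed on each call, not cached

# the response object is itself a WSGI application
app_iter, status, headers = run_wsgi_app(response, create_environ("/"))
assert status == "200 OK"
assert b"".join(app_iter) == b'{"ok": true}'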
+ """ + from ..exceptions import RequestedRangeNotSatisfiable + + if ( + accept_ranges is None + or complete_length is None + or complete_length == 0 + or not self._is_range_request_processable(environ) + ): + return False + + parsed_range = parse_range_header(environ.get("HTTP_RANGE")) + + if parsed_range is None: + raise RequestedRangeNotSatisfiable(complete_length) + + range_tuple = parsed_range.range_for_length(complete_length) + content_range_header = parsed_range.to_content_range_header(complete_length) + + if range_tuple is None or content_range_header is None: + raise RequestedRangeNotSatisfiable(complete_length) + + content_length = range_tuple[1] - range_tuple[0] + self.headers["Content-Length"] = content_length + self.headers["Accept-Ranges"] = accept_ranges + self.content_range = content_range_header # type: ignore + self.status_code = 206 + self._wrap_range_response(range_tuple[0], content_length) + return True + + def make_conditional( + self, + request_or_environ: "WSGIEnvironment", + accept_ranges: t.Union[bool, str] = False, + complete_length: t.Optional[int] = None, + ) -> "Response": + """Make the response conditional to the request. This method works + best if an etag was defined for the response already. The `add_etag` + method can be used to do that. If called without etag just the date + header is set. + + This does nothing if the request method in the request or environ is + anything but GET or HEAD. + + For optimal performance when handling range requests, it's recommended + that your response data object implements `seekable`, `seek` and `tell` + methods as described by :py:class:`io.IOBase`. Objects returned by + :meth:`~werkzeug.wsgi.wrap_file` automatically implement those methods. + + It does not remove the body of the response because that's something + the :meth:`__call__` function does for us automatically. + + Returns self so that you can do ``return resp.make_conditional(req)`` + but modifies the object in-place. + + :param request_or_environ: a request object or WSGI environment to be + used to make the response conditional + against. + :param accept_ranges: This parameter dictates the value of + `Accept-Ranges` header. If ``False`` (default), + the header is not set. If ``True``, it will be set + to ``"bytes"``. If ``None``, it will be set to + ``"none"``. If it's a string, it will use this + value. + :param complete_length: Will be used only in valid Range Requests. + It will set `Content-Range` complete length + value and compute `Content-Length` real value. + This parameter is mandatory for successful + Range Requests completion. + :raises: :class:`~werkzeug.exceptions.RequestedRangeNotSatisfiable` + if `Range` header could not be parsed or satisfied. + + .. versionchanged:: 2.0 + Range processing is skipped if length is 0 instead of + raising a 416 Range Not Satisfiable error. + """ + environ = _get_environ(request_or_environ) + if environ["REQUEST_METHOD"] in ("GET", "HEAD"): + # if the date is not in the headers, add it now. We however + # will not override an already existing header. Unfortunately + # this header will be overriden by many WSGI servers including + # wsgiref. 
+ if "date" not in self.headers: + self.headers["Date"] = http_date() + accept_ranges = _clean_accept_ranges(accept_ranges) + is206 = self._process_range_request(environ, complete_length, accept_ranges) + if not is206 and not is_resource_modified( + environ, + self.headers.get("etag"), + None, + self.headers.get("last-modified"), + ): + if parse_etags(environ.get("HTTP_IF_MATCH")): + self.status_code = 412 + else: + self.status_code = 304 + if ( + self.automatically_set_content_length + and "content-length" not in self.headers + ): + length = self.calculate_content_length() + if length is not None: + self.headers["Content-Length"] = length + return self + + def add_etag(self, overwrite: bool = False, weak: bool = False) -> None: + """Add an etag for the current response if there is none yet. + + .. versionchanged:: 2.0 + SHA-1 is used to generate the value. MD5 may not be + available in some environments. + """ + if overwrite or "etag" not in self.headers: + self.set_etag(generate_etag(self.get_data()), weak) + + +class ResponseStream: + """A file descriptor like object used by the :class:`ResponseStreamMixin` to + represent the body of the stream. It directly pushes into the response + iterable of the response object. + """ + + mode = "wb+" + + def __init__(self, response: Response): + self.response = response + self.closed = False + + def write(self, value: bytes) -> int: + if self.closed: + raise ValueError("I/O operation on closed file") + self.response._ensure_sequence(mutable=True) + self.response.response.append(value) # type: ignore + self.response.headers.pop("Content-Length", None) + return len(value) + + def writelines(self, seq: t.Iterable[bytes]) -> None: + for item in seq: + self.write(item) + + def close(self) -> None: + self.closed = True + + def flush(self) -> None: + if self.closed: + raise ValueError("I/O operation on closed file") + + def isatty(self) -> bool: + if self.closed: + raise ValueError("I/O operation on closed file") + return False + + def tell(self) -> int: + self.response._ensure_sequence() + return sum(map(len, self.response.response)) + + @property + def encoding(self) -> str: + return self.response.charset + + +class ResponseStreamMixin: + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'ResponseStreamMixin' is deprecated and will be removed in" + " Werkzeug 2.1. 'Response' now includes the functionality" + " directly.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/user_agent.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/user_agent.py new file mode 100644 index 0000000..184ffd0 --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wrappers/user_agent.py @@ -0,0 +1,14 @@ +import typing as t +import warnings + + +class UserAgentMixin: + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + warnings.warn( + "'UserAgentMixin' is deprecated and will be removed in" + " Werkzeug 2.1. 
'Request' now includes the functionality" + " directly.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wsgi.py b/venv/lib/python3.8/site-packages/werkzeug/wsgi.py new file mode 100644 index 0000000..9cfa74d --- /dev/null +++ b/venv/lib/python3.8/site-packages/werkzeug/wsgi.py @@ -0,0 +1,982 @@ +import io +import re +import typing as t +from functools import partial +from functools import update_wrapper +from itertools import chain + +from ._internal import _make_encode_wrapper +from ._internal import _to_bytes +from ._internal import _to_str +from .sansio import utils as _sansio_utils +from .sansio.utils import host_is_trusted # noqa: F401 # Imported as part of API +from .urls import _URLTuple +from .urls import uri_to_iri +from .urls import url_join +from .urls import url_parse +from .urls import url_quote + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +def responder(f: t.Callable[..., "WSGIApplication"]) -> "WSGIApplication": + """Marks a function as responder. Decorate a function with it and it + will automatically call the return value as WSGI application. + + Example:: + + @responder + def application(environ, start_response): + return Response('Hello World!') + """ + return update_wrapper(lambda *a: f(*a)(*a[-2:]), f) + + +def get_current_url( + environ: "WSGIEnvironment", + root_only: bool = False, + strip_querystring: bool = False, + host_only: bool = False, + trusted_hosts: t.Optional[t.Iterable[str]] = None, +) -> str: + """Recreate the URL for a request from the parts in a WSGI + environment. + + The URL is an IRI, not a URI, so it may contain Unicode characters. + Use :func:`~werkzeug.urls.iri_to_uri` to convert it to ASCII. + + :param environ: The WSGI environment to get the URL parts from. + :param root_only: Only build the root path, don't include the + remaining path or query string. + :param strip_querystring: Don't include the query string. + :param host_only: Only build the scheme and host. + :param trusted_hosts: A list of trusted host names to validate the + host against. + """ + parts = { + "scheme": environ["wsgi.url_scheme"], + "host": get_host(environ, trusted_hosts), + } + + if not host_only: + parts["root_path"] = environ.get("SCRIPT_NAME", "") + + if not root_only: + parts["path"] = environ.get("PATH_INFO", "") + + if not strip_querystring: + parts["query_string"] = environ.get("QUERY_STRING", "").encode("latin1") + + return _sansio_utils.get_current_url(**parts) + + +def _get_server( + environ: "WSGIEnvironment", +) -> t.Optional[t.Tuple[str, t.Optional[int]]]: + name = environ.get("SERVER_NAME") + + if name is None: + return None + + try: + port: t.Optional[int] = int(environ.get("SERVER_PORT", None)) + except (TypeError, ValueError): + # unix socket + port = None + + return name, port + + +def get_host( + environ: "WSGIEnvironment", trusted_hosts: t.Optional[t.Iterable[str]] = None +) -> str: + """Return the host for the given WSGI environment. + + The ``Host`` header is preferred, then ``SERVER_NAME`` if it's not + set. The returned host will only contain the port if it is different + than the standard port for the protocol. + + Optionally, verify that the host is trusted using + :func:`host_is_trusted` and raise a + :exc:`~werkzeug.exceptions.SecurityError` if it is not. + + :param environ: A WSGI environment dict. + :param trusted_hosts: A list of trusted host names. 
+ + :return: Host, with port if necessary. + :raise ~werkzeug.exceptions.SecurityError: If the host is not + trusted. + """ + return _sansio_utils.get_host( + environ["wsgi.url_scheme"], + environ.get("HTTP_HOST"), + _get_server(environ), + trusted_hosts, + ) + + +def get_content_length(environ: "WSGIEnvironment") -> t.Optional[int]: + """Returns the content length from the WSGI environment as + integer. If it's not available or chunked transfer encoding is used, + ``None`` is returned. + + .. versionadded:: 0.9 + + :param environ: the WSGI environ to fetch the content length from. + """ + if environ.get("HTTP_TRANSFER_ENCODING", "") == "chunked": + return None + + content_length = environ.get("CONTENT_LENGTH") + if content_length is not None: + try: + return max(0, int(content_length)) + except (ValueError, TypeError): + pass + return None + + +def get_input_stream( + environ: "WSGIEnvironment", safe_fallback: bool = True +) -> t.IO[bytes]: + """Returns the input stream from the WSGI environment and wraps it + in the most sensible way possible. The stream returned is not the + raw WSGI stream in most cases but one that is safe to read from + without taking into account the content length. + + If content length is not set, the stream will be empty for safety reasons. + If the WSGI server supports chunked or infinite streams, it should set + the ``wsgi.input_terminated`` value in the WSGI environ to indicate that. + + .. versionadded:: 0.9 + + :param environ: the WSGI environ to fetch the stream from. + :param safe_fallback: use an empty stream as a safe fallback when the + content length is not set. Disabling this allows infinite streams, + which can be a denial-of-service risk. + """ + stream = t.cast(t.IO[bytes], environ["wsgi.input"]) + content_length = get_content_length(environ) + + # A wsgi extension that tells us if the input is terminated. In + # that case we return the stream unchanged as we know we can safely + # read it until the end. + if environ.get("wsgi.input_terminated"): + return stream + + # If the request doesn't specify a content length, returning the stream is + # potentially dangerous because it could be infinite, malicious or not. If + # safe_fallback is true, return an empty stream instead for safety. + if content_length is None: + return io.BytesIO() if safe_fallback else stream + + # Otherwise limit the stream to the content length + return t.cast(t.IO[bytes], LimitedStream(stream, content_length)) + + +def get_query_string(environ: "WSGIEnvironment") -> str: + """Returns the ``QUERY_STRING`` from the WSGI environment. This also + takes care of the WSGI decoding dance. The string returned will be + restricted to ASCII characters. + + :param environ: WSGI environment to get the query string from. + + .. versionadded:: 0.9 + """ + qs = environ.get("QUERY_STRING", "").encode("latin1") + # QUERY_STRING really should be ascii safe but some browsers + # will send us some unicode stuff (I am looking at you IE). + # In that case we want to urllib quote it badly. + return url_quote(qs, safe=":&%=+$!*'(),") + + +def get_path_info( + environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace" +) -> str: + """Return the ``PATH_INFO`` from the WSGI environment and decode it + unless ``charset`` is ``None``. + + :param environ: WSGI environment to get the path from. + :param charset: The charset for the path info, or ``None`` if no + decoding should be performed. + :param errors: The decoding error handling. + + .. 
versionadded:: 0.9 + """ + path = environ.get("PATH_INFO", "").encode("latin1") + return _to_str(path, charset, errors, allow_none_charset=True) # type: ignore + + +def get_script_name( + environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace" +) -> str: + """Return the ``SCRIPT_NAME`` from the WSGI environment and decode + it unless `charset` is set to ``None``. + + :param environ: WSGI environment to get the path from. + :param charset: The charset for the path, or ``None`` if no decoding + should be performed. + :param errors: The decoding error handling. + + .. versionadded:: 0.9 + """ + path = environ.get("SCRIPT_NAME", "").encode("latin1") + return _to_str(path, charset, errors, allow_none_charset=True) # type: ignore + + +def pop_path_info( + environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace" +) -> t.Optional[str]: + """Removes and returns the next segment of `PATH_INFO`, pushing it onto + `SCRIPT_NAME`. Returns `None` if there is nothing left on `PATH_INFO`. + + If the `charset` is set to `None` bytes are returned. + + If there are empty segments (``'/foo//bar``) these are ignored but + properly pushed to the `SCRIPT_NAME`: + + >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'} + >>> pop_path_info(env) + 'a' + >>> env['SCRIPT_NAME'] + '/foo/a' + >>> pop_path_info(env) + 'b' + >>> env['SCRIPT_NAME'] + '/foo/a/b' + + .. versionadded:: 0.5 + + .. versionchanged:: 0.9 + The path is now decoded and a charset and encoding + parameter can be provided. + + :param environ: the WSGI environment that is modified. + :param charset: The ``encoding`` parameter passed to + :func:`bytes.decode`. + :param errors: The ``errors`` paramater passed to + :func:`bytes.decode`. + """ + path = environ.get("PATH_INFO") + if not path: + return None + + script_name = environ.get("SCRIPT_NAME", "") + + # shift multiple leading slashes over + old_path = path + path = path.lstrip("/") + if path != old_path: + script_name += "/" * (len(old_path) - len(path)) + + if "/" not in path: + environ["PATH_INFO"] = "" + environ["SCRIPT_NAME"] = script_name + path + rv = path.encode("latin1") + else: + segment, path = path.split("/", 1) + environ["PATH_INFO"] = f"/{path}" + environ["SCRIPT_NAME"] = script_name + segment + rv = segment.encode("latin1") + + return _to_str(rv, charset, errors, allow_none_charset=True) # type: ignore + + +def peek_path_info( + environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace" +) -> t.Optional[str]: + """Returns the next segment on the `PATH_INFO` or `None` if there + is none. Works like :func:`pop_path_info` without modifying the + environment: + + >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'} + >>> peek_path_info(env) + 'a' + >>> peek_path_info(env) + 'a' + + If the `charset` is set to `None` bytes are returned. + + .. versionadded:: 0.5 + + .. versionchanged:: 0.9 + The path is now decoded and a charset and encoding + parameter can be provided. + + :param environ: the WSGI environment that is checked. 
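A combined sketch of the environ helpers above (``get_host``, ``get_current_url``, ``peek_path_info``, ``pop_path_info``), assuming the vendored Werkzeug 2.0.x; the example URL is illustrative.

from werkzeug.test import create_environ
from werkzeug.wsgi import get_current_url, get_host, peek_path_info, pop_path_info

environ = create_environ("/app/users/42", base_url="http://example.com/api")
assert get_host(environ) == "example.com"
assert get_current_url(environ) == "http://example.com/api/app/users/42"

assert peek_path_info(environ) == "app"        # inspect without modifying the environ
assert pop_path_info(environ) == "app"         # shifts the segment onto SCRIPT_NAME
assert environ["SCRIPT_NAME"] == "/api/app"
assert environ["PATH_INFO"] == "/users/42"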
+ """ + segments = environ.get("PATH_INFO", "").lstrip("/").split("/", 1) + if segments: + return _to_str( # type: ignore + segments[0].encode("latin1"), charset, errors, allow_none_charset=True + ) + return None + + +def extract_path_info( + environ_or_baseurl: t.Union[str, "WSGIEnvironment"], + path_or_url: t.Union[str, _URLTuple], + charset: str = "utf-8", + errors: str = "werkzeug.url_quote", + collapse_http_schemes: bool = True, +) -> t.Optional[str]: + """Extracts the path info from the given URL (or WSGI environment) and + path. The path info returned is a string. The URLs might also be IRIs. + + If the path info could not be determined, `None` is returned. + + Some examples: + + >>> extract_path_info('http://example.com/app', '/app/hello') + '/hello' + >>> extract_path_info('http://example.com/app', + ... 'https://example.com/app/hello') + '/hello' + >>> extract_path_info('http://example.com/app', + ... 'https://example.com/app/hello', + ... collapse_http_schemes=False) is None + True + + Instead of providing a base URL you can also pass a WSGI environment. + + :param environ_or_baseurl: a WSGI environment dict, a base URL or + base IRI. This is the root of the + application. + :param path_or_url: an absolute path from the server root, a + relative path (in which case it's the path info) + or a full URL. + :param charset: the charset for byte data in URLs + :param errors: the error handling on decode + :param collapse_http_schemes: if set to `False` the algorithm does + not assume that http and https on the + same server point to the same + resource. + + .. versionchanged:: 0.15 + The ``errors`` parameter defaults to leaving invalid bytes + quoted instead of replacing them. + + .. versionadded:: 0.6 + """ + + def _normalize_netloc(scheme: str, netloc: str) -> str: + parts = netloc.split("@", 1)[-1].split(":", 1) + port: t.Optional[str] + + if len(parts) == 2: + netloc, port = parts + if (scheme == "http" and port == "80") or ( + scheme == "https" and port == "443" + ): + port = None + else: + netloc = parts[0] + port = None + + if port is not None: + netloc += f":{port}" + + return netloc + + # make sure whatever we are working on is a IRI and parse it + path = uri_to_iri(path_or_url, charset, errors) + if isinstance(environ_or_baseurl, dict): + environ_or_baseurl = get_current_url(environ_or_baseurl, root_only=True) + base_iri = uri_to_iri(environ_or_baseurl, charset, errors) + base_scheme, base_netloc, base_path = url_parse(base_iri)[:3] + cur_scheme, cur_netloc, cur_path = url_parse(url_join(base_iri, path))[:3] + + # normalize the network location + base_netloc = _normalize_netloc(base_scheme, base_netloc) + cur_netloc = _normalize_netloc(cur_scheme, cur_netloc) + + # is that IRI even on a known HTTP scheme? + if collapse_http_schemes: + for scheme in base_scheme, cur_scheme: + if scheme not in ("http", "https"): + return None + else: + if not (base_scheme in ("http", "https") and base_scheme == cur_scheme): + return None + + # are the netlocs compatible? + if base_netloc != cur_netloc: + return None + + # are we below the application path? + base_path = base_path.rstrip("/") + if not cur_path.startswith(base_path): + return None + + return f"/{cur_path[len(base_path) :].lstrip('/')}" + + +class ClosingIterator: + """The WSGI specification requires that all middlewares and gateways + respect the `close` callback of the iterable returned by the application. 
+ Because it is useful to add another close action to a returned iterable + and adding a custom iterable is a boring task this class can be used for + that:: + + return ClosingIterator(app(environ, start_response), [cleanup_session, + cleanup_locals]) + + If there is just one close function it can be passed instead of the list. + + A closing iterator is not needed if the application uses response objects + and finishes the processing if the response is started:: + + try: + return response(environ, start_response) + finally: + cleanup_session() + cleanup_locals() + """ + + def __init__( + self, + iterable: t.Iterable[bytes], + callbacks: t.Optional[ + t.Union[t.Callable[[], None], t.Iterable[t.Callable[[], None]]] + ] = None, + ) -> None: + iterator = iter(iterable) + self._next = t.cast(t.Callable[[], bytes], partial(next, iterator)) + if callbacks is None: + callbacks = [] + elif callable(callbacks): + callbacks = [callbacks] + else: + callbacks = list(callbacks) + iterable_close = getattr(iterable, "close", None) + if iterable_close: + callbacks.insert(0, iterable_close) + self._callbacks = callbacks + + def __iter__(self) -> "ClosingIterator": + return self + + def __next__(self) -> bytes: + return self._next() + + def close(self) -> None: + for callback in self._callbacks: + callback() + + +def wrap_file( + environ: "WSGIEnvironment", file: t.IO[bytes], buffer_size: int = 8192 +) -> t.Iterable[bytes]: + """Wraps a file. This uses the WSGI server's file wrapper if available + or otherwise the generic :class:`FileWrapper`. + + .. versionadded:: 0.5 + + If the file wrapper from the WSGI server is used it's important to not + iterate over it from inside the application but to pass it through + unchanged. If you want to pass out a file wrapper inside a response + object you have to set :attr:`Response.direct_passthrough` to `True`. + + More information about file wrappers are available in :pep:`333`. + + :param file: a :class:`file`-like object with a :meth:`~file.read` method. + :param buffer_size: number of bytes for one iteration. + """ + return environ.get("wsgi.file_wrapper", FileWrapper)( # type: ignore + file, buffer_size + ) + + +class FileWrapper: + """This class can be used to convert a :class:`file`-like object into + an iterable. It yields `buffer_size` blocks until the file is fully + read. + + You should not use this class directly but rather use the + :func:`wrap_file` function that uses the WSGI server's file wrapper + support if it's available. + + .. versionadded:: 0.5 + + If you're using this object together with a :class:`Response` you have + to use the `direct_passthrough` mode. + + :param file: a :class:`file`-like object with a :meth:`~file.read` method. + :param buffer_size: number of bytes for one iteration. 
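A sketch of serving a file-like object through ``wrap_file`` with ``direct_passthrough``, assuming the vendored Werkzeug 2.0.x; in a real application ``werkzeug.utils.send_file`` is usually the better choice, as noted earlier in this patch.

import io

from werkzeug.test import create_environ
from werkzeug.wrappers import Response
from werkzeug.wsgi import wrap_file

environ = create_environ("/download")
payload = io.BytesIO(b"binary payload")

# wrap_file prefers the server's wsgi.file_wrapper and falls back to FileWrapper here
body = wrap_file(environ, payload, buffer_size=4096)
response = Response(body, mimetype="application/octet-stream", direct_passthrough=True)

app_iter, status, headers = response.get_wsgi_response(environ)
assert b"".join(app_iter) == b"binary payload"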
+ """ + + def __init__(self, file: t.IO[bytes], buffer_size: int = 8192) -> None: + self.file = file + self.buffer_size = buffer_size + + def close(self) -> None: + if hasattr(self.file, "close"): + self.file.close() + + def seekable(self) -> bool: + if hasattr(self.file, "seekable"): + return self.file.seekable() + if hasattr(self.file, "seek"): + return True + return False + + def seek(self, *args: t.Any) -> None: + if hasattr(self.file, "seek"): + self.file.seek(*args) + + def tell(self) -> t.Optional[int]: + if hasattr(self.file, "tell"): + return self.file.tell() + return None + + def __iter__(self) -> "FileWrapper": + return self + + def __next__(self) -> bytes: + data = self.file.read(self.buffer_size) + if data: + return data + raise StopIteration() + + +class _RangeWrapper: + # private for now, but should we make it public in the future ? + + """This class can be used to convert an iterable object into + an iterable that will only yield a piece of the underlying content. + It yields blocks until the underlying stream range is fully read. + The yielded blocks will have a size that can't exceed the original + iterator defined block size, but that can be smaller. + + If you're using this object together with a :class:`Response` you have + to use the `direct_passthrough` mode. + + :param iterable: an iterable object with a :meth:`__next__` method. + :param start_byte: byte from which read will start. + :param byte_range: how many bytes to read. + """ + + def __init__( + self, + iterable: t.Union[t.Iterable[bytes], t.IO[bytes]], + start_byte: int = 0, + byte_range: t.Optional[int] = None, + ): + self.iterable = iter(iterable) + self.byte_range = byte_range + self.start_byte = start_byte + self.end_byte = None + + if byte_range is not None: + self.end_byte = start_byte + byte_range + + self.read_length = 0 + self.seekable = ( + hasattr(iterable, "seekable") and iterable.seekable() # type: ignore + ) + self.end_reached = False + + def __iter__(self) -> "_RangeWrapper": + return self + + def _next_chunk(self) -> bytes: + try: + chunk = next(self.iterable) + self.read_length += len(chunk) + return chunk + except StopIteration: + self.end_reached = True + raise + + def _first_iteration(self) -> t.Tuple[t.Optional[bytes], int]: + chunk = None + if self.seekable: + self.iterable.seek(self.start_byte) # type: ignore + self.read_length = self.iterable.tell() # type: ignore + contextual_read_length = self.read_length + else: + while self.read_length <= self.start_byte: + chunk = self._next_chunk() + if chunk is not None: + chunk = chunk[self.start_byte - self.read_length :] + contextual_read_length = self.start_byte + return chunk, contextual_read_length + + def _next(self) -> bytes: + if self.end_reached: + raise StopIteration() + chunk = None + contextual_read_length = self.read_length + if self.read_length == 0: + chunk, contextual_read_length = self._first_iteration() + if chunk is None: + chunk = self._next_chunk() + if self.end_byte is not None and self.read_length >= self.end_byte: + self.end_reached = True + return chunk[: self.end_byte - contextual_read_length] + return chunk + + def __next__(self) -> bytes: + chunk = self._next() + if chunk: + return chunk + self.end_reached = True + raise StopIteration() + + def close(self) -> None: + if hasattr(self.iterable, "close"): + self.iterable.close() # type: ignore + + +def _make_chunk_iter( + stream: t.Union[t.Iterable[bytes], t.IO[bytes]], + limit: t.Optional[int], + buffer_size: int, +) -> t.Iterator[bytes]: + """Helper for the line and 
chunk iter functions.""" + if isinstance(stream, (bytes, bytearray, str)): + raise TypeError( + "Passed a string or byte object instead of true iterator or stream." + ) + if not hasattr(stream, "read"): + for item in stream: + if item: + yield item + return + stream = t.cast(t.IO[bytes], stream) + if not isinstance(stream, LimitedStream) and limit is not None: + stream = t.cast(t.IO[bytes], LimitedStream(stream, limit)) + _read = stream.read + while True: + item = _read(buffer_size) + if not item: + break + yield item + + +def make_line_iter( + stream: t.Union[t.Iterable[bytes], t.IO[bytes]], + limit: t.Optional[int] = None, + buffer_size: int = 10 * 1024, + cap_at_buffer: bool = False, +) -> t.Iterator[bytes]: + """Safely iterates line-based over an input stream. If the input stream + is not a :class:`LimitedStream` the `limit` parameter is mandatory. + + This uses the stream's :meth:`~file.read` method internally as opposite + to the :meth:`~file.readline` method that is unsafe and can only be used + in violation of the WSGI specification. The same problem applies to the + `__iter__` function of the input stream which calls :meth:`~file.readline` + without arguments. + + If you need line-by-line processing it's strongly recommended to iterate + over the input stream using this helper function. + + .. versionchanged:: 0.8 + This function now ensures that the limit was reached. + + .. versionadded:: 0.9 + added support for iterators as input stream. + + .. versionadded:: 0.11.10 + added support for the `cap_at_buffer` parameter. + + :param stream: the stream or iterate to iterate over. + :param limit: the limit in bytes for the stream. (Usually + content length. Not necessary if the `stream` + is a :class:`LimitedStream`. + :param buffer_size: The optional buffer size. + :param cap_at_buffer: if this is set chunks are split if they are longer + than the buffer size. Internally this is implemented + that the buffer size might be exhausted by a factor + of two however. + """ + _iter = _make_chunk_iter(stream, limit, buffer_size) + + first_item = next(_iter, "") + if not first_item: + return + + s = _make_encode_wrapper(first_item) + empty = t.cast(bytes, s("")) + cr = t.cast(bytes, s("\r")) + lf = t.cast(bytes, s("\n")) + crlf = t.cast(bytes, s("\r\n")) + + _iter = t.cast(t.Iterator[bytes], chain((first_item,), _iter)) + + def _iter_basic_lines() -> t.Iterator[bytes]: + _join = empty.join + buffer: t.List[bytes] = [] + while True: + new_data = next(_iter, "") + if not new_data: + break + new_buf: t.List[bytes] = [] + buf_size = 0 + for item in t.cast( + t.Iterator[bytes], chain(buffer, new_data.splitlines(True)) + ): + new_buf.append(item) + buf_size += len(item) + if item and item[-1:] in crlf: + yield _join(new_buf) + new_buf = [] + elif cap_at_buffer and buf_size >= buffer_size: + rv = _join(new_buf) + while len(rv) >= buffer_size: + yield rv[:buffer_size] + rv = rv[buffer_size:] + new_buf = [rv] + buffer = new_buf + if buffer: + yield _join(buffer) + + # This hackery is necessary to merge 'foo\r' and '\n' into one item + # of 'foo\r\n' if we were unlucky and we hit a chunk boundary. 
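# ---------------------------------------------------------------------------
# Editorial aside, not part of the vendored werkzeug/wsgi.py: a short sketch
# of how wrap_file()/FileWrapper defined earlier in this module are used from
# a response object.  Assumes the vendored Werkzeug 2.0.2 is importable;
# "report.pdf" is a placeholder path, not a file shipped in this repository.
from werkzeug.wrappers import Response
from werkzeug.wsgi import wrap_file


def download_app(environ, start_response):
    f = open("report.pdf", "rb")  # placeholder file
    # wrap_file() prefers the server's wsgi.file_wrapper (often sendfile
    # based) and falls back to FileWrapper; direct_passthrough=True keeps
    # Werkzeug from iterating over the wrapper inside the application.
    response = Response(
        wrap_file(environ, f),
        mimetype="application/pdf",
        direct_passthrough=True,
    )
    return response(environ, start_response)
# ---------------------------------------------------------------------------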
+ previous = empty + for item in _iter_basic_lines(): + if item == lf and previous[-1:] == cr: + previous += item + item = empty + if previous: + yield previous + previous = item + if previous: + yield previous + + +def make_chunk_iter( + stream: t.Union[t.Iterable[bytes], t.IO[bytes]], + separator: bytes, + limit: t.Optional[int] = None, + buffer_size: int = 10 * 1024, + cap_at_buffer: bool = False, +) -> t.Iterator[bytes]: + """Works like :func:`make_line_iter` but accepts a separator + which divides chunks. If you want newline based processing + you should use :func:`make_line_iter` instead as it + supports arbitrary newline markers. + + .. versionadded:: 0.8 + + .. versionadded:: 0.9 + added support for iterators as input stream. + + .. versionadded:: 0.11.10 + added support for the `cap_at_buffer` parameter. + + :param stream: the stream or iterate to iterate over. + :param separator: the separator that divides chunks. + :param limit: the limit in bytes for the stream. (Usually + content length. Not necessary if the `stream` + is otherwise already limited). + :param buffer_size: The optional buffer size. + :param cap_at_buffer: if this is set chunks are split if they are longer + than the buffer size. Internally this is implemented + that the buffer size might be exhausted by a factor + of two however. + """ + _iter = _make_chunk_iter(stream, limit, buffer_size) + + first_item = next(_iter, b"") + if not first_item: + return + + _iter = t.cast(t.Iterator[bytes], chain((first_item,), _iter)) + if isinstance(first_item, str): + separator = _to_str(separator) + _split = re.compile(f"({re.escape(separator)})").split + _join = "".join + else: + separator = _to_bytes(separator) + _split = re.compile(b"(" + re.escape(separator) + b")").split + _join = b"".join + + buffer: t.List[bytes] = [] + while True: + new_data = next(_iter, b"") + if not new_data: + break + chunks = _split(new_data) + new_buf: t.List[bytes] = [] + buf_size = 0 + for item in chain(buffer, chunks): + if item == separator: + yield _join(new_buf) + new_buf = [] + buf_size = 0 + else: + buf_size += len(item) + new_buf.append(item) + + if cap_at_buffer and buf_size >= buffer_size: + rv = _join(new_buf) + while len(rv) >= buffer_size: + yield rv[:buffer_size] + rv = rv[buffer_size:] + new_buf = [rv] + buf_size = len(rv) + + buffer = new_buf + if buffer: + yield _join(buffer) + + +class LimitedStream(io.IOBase): + """Wraps a stream so that it doesn't read more than n bytes. If the + stream is exhausted and the caller tries to get more bytes from it + :func:`on_exhausted` is called which by default returns an empty + string. The return value of that function is forwarded + to the reader function. So if it returns an empty string + :meth:`read` will return an empty string as well. + + The limit however must never be higher than what the stream can + output. Otherwise :meth:`readlines` will try to read past the + limit. + + .. admonition:: Note on WSGI compliance + + calls to :meth:`readline` and :meth:`readlines` are not + WSGI compliant because it passes a size argument to the + readline methods. Unfortunately the WSGI PEP is not safely + implementable without a size argument to :meth:`readline` + because there is no EOF marker in the stream. As a result + of that the use of :meth:`readline` is discouraged. + + For the same reason iterating over the :class:`LimitedStream` + is not portable. It internally calls :meth:`readline`. 
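# ---------------------------------------------------------------------------
# Editorial aside, not part of the vendored werkzeug/wsgi.py: a quick sketch
# of make_line_iter() and make_chunk_iter() from above, run against an
# in-memory stream (assumes the vendored Werkzeug 2.0.2 is importable).
import io

from werkzeug.wsgi import make_chunk_iter, make_line_iter

payload = b"first line\nsecond line\r\nthird"

# Line-based iteration; limit is passed because io.BytesIO is not a
# LimitedStream and a real WSGI input stream has no EOF marker.
for line in make_line_iter(io.BytesIO(payload), limit=len(payload)):
    print(line)    # b'first line\n', b'second line\r\n', b'third'

# The same data split on an arbitrary separator instead of newlines.
for chunk in make_chunk_iter(io.BytesIO(payload), separator=b" ", limit=len(payload)):
    print(chunk)   # b'first', b'line\nsecond', b'line\r\nthird'
# ---------------------------------------------------------------------------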
+ + We strongly suggest using :meth:`read` only or using the + :func:`make_line_iter` which safely iterates line-based + over a WSGI input stream. + + :param stream: the stream to wrap. + :param limit: the limit for the stream, must not be longer than + what the string can provide if the stream does not + end with `EOF` (like `wsgi.input`) + """ + + def __init__(self, stream: t.IO[bytes], limit: int) -> None: + self._read = stream.read + self._readline = stream.readline + self._pos = 0 + self.limit = limit + + def __iter__(self) -> "LimitedStream": + return self + + @property + def is_exhausted(self) -> bool: + """If the stream is exhausted this attribute is `True`.""" + return self._pos >= self.limit + + def on_exhausted(self) -> bytes: + """This is called when the stream tries to read past the limit. + The return value of this function is returned from the reading + function. + """ + # Read null bytes from the stream so that we get the + # correct end of stream marker. + return self._read(0) + + def on_disconnect(self) -> bytes: + """What should happen if a disconnect is detected? The return + value of this function is returned from read functions in case + the client went away. By default a + :exc:`~werkzeug.exceptions.ClientDisconnected` exception is raised. + """ + from .exceptions import ClientDisconnected + + raise ClientDisconnected() + + def exhaust(self, chunk_size: int = 1024 * 64) -> None: + """Exhaust the stream. This consumes all the data left until the + limit is reached. + + :param chunk_size: the size for a chunk. It will read the chunk + until the stream is exhausted and throw away + the results. + """ + to_read = self.limit - self._pos + chunk = chunk_size + while to_read > 0: + chunk = min(to_read, chunk) + self.read(chunk) + to_read -= chunk + + def read(self, size: t.Optional[int] = None) -> bytes: + """Read `size` bytes or if size is not provided everything is read. + + :param size: the number of bytes read. + """ + if self._pos >= self.limit: + return self.on_exhausted() + if size is None or size == -1: # -1 is for consistence with file + size = self.limit + to_read = min(self.limit - self._pos, size) + try: + read = self._read(to_read) + except (OSError, ValueError): + return self.on_disconnect() + if to_read and len(read) != to_read: + return self.on_disconnect() + self._pos += len(read) + return read + + def readline(self, size: t.Optional[int] = None) -> bytes: + """Reads one line from the stream.""" + if self._pos >= self.limit: + return self.on_exhausted() + if size is None: + size = self.limit - self._pos + else: + size = min(size, self.limit - self._pos) + try: + line = self._readline(size) + except (ValueError, OSError): + return self.on_disconnect() + if size and not line: + return self.on_disconnect() + self._pos += len(line) + return line + + def readlines(self, size: t.Optional[int] = None) -> t.List[bytes]: + """Reads a file into a list of strings. It calls :meth:`readline` + until the file is read to the end. It does support the optional + `size` argument if the underlying stream supports it for + `readline`. + """ + last_pos = self._pos + result = [] + if size is not None: + end = min(self.limit, last_pos + size) + else: + end = self.limit + while True: + if size is not None: + size -= last_pos - self._pos + if self._pos >= end: + break + result.append(self.readline(size)) + if size is not None: + last_pos = self._pos + return result + + def tell(self) -> int: + """Returns the position of the stream. + + .. 
versionadded:: 0.9 + """ + return self._pos + + def __next__(self) -> bytes: + line = self.readline() + if not line: + raise StopIteration() + return line + + def readable(self) -> bool: + return True diff --git a/venv/pyvenv.cfg b/venv/pyvenv.cfg new file mode 100644 index 0000000..8a88747 --- /dev/null +++ b/venv/pyvenv.cfg @@ -0,0 +1,3 @@ +home = /Users/indraneel/opt/anaconda3/bin +include-system-site-packages = false +version = 3.8.5 From aa3cf162fa7e18be25382f5d51a1d28ed161a146 Mon Sep 17 00:00:00 2001 From: indraneelSLU Date: Wed, 10 Nov 2021 05:52:11 -0600 Subject: [PATCH 3/5] updated Procfile --- Procfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Procfile b/Procfile index d06acd1..0204613 100644 --- a/Procfile +++ b/Procfile @@ -1 +1 @@ -web: gunicorn wsgi:app +web: gunicorn routes:app From eb2826ce41a84e44ba6f90f519bd1d34d8165d73 Mon Sep 17 00:00:00 2001 From: indraneelSLU Date: Wed, 10 Nov 2021 06:14:02 -0600 Subject: [PATCH 4/5] ignoring environment vars --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 92f2ed1..96ebdd2 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,5 @@ Thumbs.db # folders to be ignored env/ +venv +__pycache__ From bda098e4d292eb50741901b8fe1b6eef16a46233 Mon Sep 17 00:00:00 2001 From: indraneelSLU Date: Wed, 10 Nov 2021 06:16:29 -0600 Subject: [PATCH 5/5] removed environment --- venv/bin/Activate.ps1 | 241 - venv/bin/activate | 76 - venv/bin/activate.csh | 37 - venv/bin/activate.fish | 75 - venv/bin/easy_install | 8 - venv/bin/easy_install-3.8 | 8 - venv/bin/flask | 8 - venv/bin/gunicorn | 8 - venv/bin/pip | 8 - venv/bin/pip3 | 8 - venv/bin/pip3.8 | 8 - venv/bin/python | 1 - venv/bin/python3 | 1 - .../Flask-2.0.2.dist-info/INSTALLER | 1 - .../Flask-2.0.2.dist-info/LICENSE.rst | 28 - .../Flask-2.0.2.dist-info/METADATA | 125 - .../Flask-2.0.2.dist-info/RECORD | 51 - .../site-packages/Flask-2.0.2.dist-info/WHEEL | 5 - .../Flask-2.0.2.dist-info/entry_points.txt | 3 - .../Flask-2.0.2.dist-info/top_level.txt | 1 - .../Jinja2-3.0.3.dist-info/INSTALLER | 1 - .../Jinja2-3.0.3.dist-info/LICENSE.rst | 28 - .../Jinja2-3.0.3.dist-info/METADATA | 113 - .../Jinja2-3.0.3.dist-info/RECORD | 58 - .../Jinja2-3.0.3.dist-info/WHEEL | 5 - .../Jinja2-3.0.3.dist-info/entry_points.txt | 3 - .../Jinja2-3.0.3.dist-info/top_level.txt | 1 - .../MarkupSafe-2.0.1.dist-info/INSTALLER | 1 - .../MarkupSafe-2.0.1.dist-info/LICENSE.rst | 28 - .../MarkupSafe-2.0.1.dist-info/METADATA | 100 - .../MarkupSafe-2.0.1.dist-info/RECORD | 14 - .../MarkupSafe-2.0.1.dist-info/WHEEL | 5 - .../MarkupSafe-2.0.1.dist-info/top_level.txt | 1 - .../Werkzeug-2.0.2.dist-info/INSTALLER | 1 - .../Werkzeug-2.0.2.dist-info/LICENSE.rst | 28 - .../Werkzeug-2.0.2.dist-info/METADATA | 129 - .../Werkzeug-2.0.2.dist-info/RECORD | 111 - .../Werkzeug-2.0.2.dist-info/WHEEL | 5 - .../Werkzeug-2.0.2.dist-info/top_level.txt | 1 - .../click-8.0.3.dist-info/INSTALLER | 1 - .../click-8.0.3.dist-info/LICENSE.rst | 28 - .../click-8.0.3.dist-info/METADATA | 111 - .../click-8.0.3.dist-info/RECORD | 41 - .../site-packages/click-8.0.3.dist-info/WHEEL | 5 - .../click-8.0.3.dist-info/top_level.txt | 1 - .../python3.8/site-packages/click/__init__.py | 75 - .../python3.8/site-packages/click/_compat.py | 627 -- .../site-packages/click/_termui_impl.py | 718 -- .../site-packages/click/_textwrap.py | 49 - .../site-packages/click/_unicodefun.py | 100 - .../site-packages/click/_winconsole.py | 279 - .../lib/python3.8/site-packages/click/core.py | 2953 ------ 
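The Procfile change in PATCH 3/5 above points the Heroku web dyno at gunicorn routes:app, meaning gunicorn imports a module named routes and serves the WSGI callable bound to the name app inside it. The project's actual routes.py is not shown anywhere in this patch series, so the sketch below is only a hypothetical minimal layout that would satisfy that process type, assuming Flask is available in the environment.

    # routes.py -- hypothetical minimal module matching "web: gunicorn routes:app";
    # the repository's real routes.py is not part of these patches.
    from flask import Flask

    app = Flask(__name__)  # the WSGI callable gunicorn serves


    @app.route("/")
    def index():
        return "Hello from the Heroku dyno"

Running gunicorn routes:app locally reproduces what the dyno will do, which is a quick way to validate the Procfile entry before pushing to Heroku.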
.../site-packages/click/decorators.py | 436 - .../site-packages/click/exceptions.py | 287 - .../site-packages/click/formatting.py | 301 - .../python3.8/site-packages/click/globals.py | 69 - .../python3.8/site-packages/click/parser.py | 529 -- .../python3.8/site-packages/click/py.typed | 0 .../site-packages/click/shell_completion.py | 581 -- .../python3.8/site-packages/click/termui.py | 809 -- .../python3.8/site-packages/click/testing.py | 479 - .../python3.8/site-packages/click/types.py | 1052 --- .../python3.8/site-packages/click/utils.py | 579 -- .../python3.8/site-packages/easy_install.py | 5 - .../python3.8/site-packages/flask/__init__.py | 46 - .../python3.8/site-packages/flask/__main__.py | 3 - venv/lib/python3.8/site-packages/flask/app.py | 2091 ----- .../site-packages/flask/blueprints.py | 609 -- venv/lib/python3.8/site-packages/flask/cli.py | 998 -- .../python3.8/site-packages/flask/config.py | 295 - venv/lib/python3.8/site-packages/flask/ctx.py | 480 - .../site-packages/flask/debughelpers.py | 172 - .../python3.8/site-packages/flask/globals.py | 59 - .../python3.8/site-packages/flask/helpers.py | 836 -- .../site-packages/flask/json/__init__.py | 357 - .../python3.8/site-packages/flask/json/tag.py | 312 - .../python3.8/site-packages/flask/logging.py | 74 - .../python3.8/site-packages/flask/py.typed | 0 .../python3.8/site-packages/flask/scaffold.py | 875 -- .../python3.8/site-packages/flask/sessions.py | 404 - .../python3.8/site-packages/flask/signals.py | 56 - .../site-packages/flask/templating.py | 165 - .../python3.8/site-packages/flask/testing.py | 280 - .../python3.8/site-packages/flask/typing.py | 56 - .../python3.8/site-packages/flask/views.py | 158 - .../python3.8/site-packages/flask/wrappers.py | 167 - .../gunicorn-20.1.0.dist-info/INSTALLER | 1 - .../gunicorn-20.1.0.dist-info/LICENSE | 23 - .../gunicorn-20.1.0.dist-info/METADATA | 120 - .../gunicorn-20.1.0.dist-info/RECORD | 76 - .../gunicorn-20.1.0.dist-info/WHEEL | 5 - .../entry_points.txt | 7 - .../gunicorn-20.1.0.dist-info/top_level.txt | 1 - .../site-packages/gunicorn/__init__.py | 9 - .../site-packages/gunicorn/__main__.py | 7 - .../site-packages/gunicorn/app/__init__.py | 4 - .../site-packages/gunicorn/app/base.py | 231 - .../site-packages/gunicorn/app/pasterapp.py | 75 - .../site-packages/gunicorn/app/wsgiapp.py | 71 - .../site-packages/gunicorn/arbiter.py | 652 -- .../site-packages/gunicorn/config.py | 2190 ----- .../python3.8/site-packages/gunicorn/debug.py | 69 - .../site-packages/gunicorn/errors.py | 29 - .../site-packages/gunicorn/glogging.py | 464 - .../site-packages/gunicorn/http/__init__.py | 9 - .../site-packages/gunicorn/http/body.py | 262 - .../site-packages/gunicorn/http/errors.py | 120 - .../site-packages/gunicorn/http/message.py | 356 - .../site-packages/gunicorn/http/parser.py | 52 - .../site-packages/gunicorn/http/unreader.py | 79 - .../site-packages/gunicorn/http/wsgi.py | 393 - .../gunicorn/instrument/__init__.py | 0 .../gunicorn/instrument/statsd.py | 130 - .../site-packages/gunicorn/pidfile.py | 86 - .../site-packages/gunicorn/reloader.py | 132 - .../python3.8/site-packages/gunicorn/sock.py | 212 - .../site-packages/gunicorn/systemd.py | 77 - .../python3.8/site-packages/gunicorn/util.py | 639 -- .../gunicorn/workers/__init__.py | 15 - .../site-packages/gunicorn/workers/base.py | 273 - .../gunicorn/workers/base_async.py | 148 - .../gunicorn/workers/geventlet.py | 179 - .../site-packages/gunicorn/workers/ggevent.py | 189 - .../site-packages/gunicorn/workers/gthread.py | 362 - 
.../gunicorn/workers/gtornado.py | 171 - .../site-packages/gunicorn/workers/sync.py | 211 - .../gunicorn/workers/workertmp.py | 55 - .../itsdangerous-2.0.1.dist-info/INSTALLER | 1 - .../itsdangerous-2.0.1.dist-info/LICENSE.rst | 28 - .../itsdangerous-2.0.1.dist-info/METADATA | 96 - .../itsdangerous-2.0.1.dist-info/RECORD | 25 - .../itsdangerous-2.0.1.dist-info/WHEEL | 5 - .../top_level.txt | 1 - .../site-packages/itsdangerous/__init__.py | 22 - .../site-packages/itsdangerous/_json.py | 34 - .../site-packages/itsdangerous/encoding.py | 54 - .../site-packages/itsdangerous/exc.py | 107 - .../site-packages/itsdangerous/jws.py | 259 - .../site-packages/itsdangerous/py.typed | 0 .../site-packages/itsdangerous/serializer.py | 295 - .../site-packages/itsdangerous/signer.py | 257 - .../site-packages/itsdangerous/timed.py | 227 - .../site-packages/itsdangerous/url_safe.py | 80 - .../site-packages/jinja2/__init__.py | 45 - .../site-packages/jinja2/_identifier.py | 6 - .../site-packages/jinja2/async_utils.py | 75 - .../python3.8/site-packages/jinja2/bccache.py | 364 - .../site-packages/jinja2/compiler.py | 1957 ---- .../site-packages/jinja2/constants.py | 20 - .../python3.8/site-packages/jinja2/debug.py | 259 - .../site-packages/jinja2/defaults.py | 48 - .../site-packages/jinja2/environment.py | 1661 ---- .../site-packages/jinja2/exceptions.py | 166 - .../lib/python3.8/site-packages/jinja2/ext.py | 879 -- .../python3.8/site-packages/jinja2/filters.py | 1824 ---- .../site-packages/jinja2/idtracking.py | 318 - .../python3.8/site-packages/jinja2/lexer.py | 869 -- .../python3.8/site-packages/jinja2/loaders.py | 652 -- .../python3.8/site-packages/jinja2/meta.py | 111 - .../site-packages/jinja2/nativetypes.py | 124 - .../python3.8/site-packages/jinja2/nodes.py | 1204 --- .../site-packages/jinja2/optimizer.py | 47 - .../python3.8/site-packages/jinja2/parser.py | 1040 --- .../python3.8/site-packages/jinja2/py.typed | 0 .../python3.8/site-packages/jinja2/runtime.py | 1104 --- .../python3.8/site-packages/jinja2/sandbox.py | 428 - .../python3.8/site-packages/jinja2/tests.py | 255 - .../python3.8/site-packages/jinja2/utils.py | 854 -- .../python3.8/site-packages/jinja2/visitor.py | 92 - .../site-packages/markupsafe/__init__.py | 288 - .../site-packages/markupsafe/_native.py | 75 - .../site-packages/markupsafe/_speedups.c | 339 - .../markupsafe/_speedups.cpython-38-darwin.so | Bin 35480 -> 0 bytes .../site-packages/markupsafe/_speedups.pyi | 9 - .../site-packages/markupsafe/py.typed | 0 .../pip-20.1.1.dist-info/INSTALLER | 1 - .../pip-20.1.1.dist-info/LICENSE.txt | 20 - .../pip-20.1.1.dist-info/METADATA | 87 - .../site-packages/pip-20.1.1.dist-info/RECORD | 743 -- .../site-packages/pip-20.1.1.dist-info/WHEEL | 6 - .../pip-20.1.1.dist-info/entry_points.txt | 5 - .../pip-20.1.1.dist-info/top_level.txt | 1 - .../python3.8/site-packages/pip/__init__.py | 18 - .../python3.8/site-packages/pip/__main__.py | 26 - .../site-packages/pip/_internal/__init__.py | 17 - .../site-packages/pip/_internal/build_env.py | 219 - .../site-packages/pip/_internal/cache.py | 349 - .../pip/_internal/cli/__init__.py | 4 - .../pip/_internal/cli/autocompletion.py | 164 - .../pip/_internal/cli/base_command.py | 228 - .../pip/_internal/cli/cmdoptions.py | 962 -- .../pip/_internal/cli/command_context.py | 36 - .../site-packages/pip/_internal/cli/main.py | 75 - .../pip/_internal/cli/main_parser.py | 99 - .../site-packages/pip/_internal/cli/parser.py | 266 - .../pip/_internal/cli/progress_bars.py | 277 - .../pip/_internal/cli/req_command.py | 408 - 
.../pip/_internal/cli/spinners.py | 173 - .../pip/_internal/cli/status_codes.py | 8 - .../pip/_internal/commands/__init__.py | 122 - .../pip/_internal/commands/cache.py | 181 - .../pip/_internal/commands/check.py | 51 - .../pip/_internal/commands/completion.py | 95 - .../pip/_internal/commands/configuration.py | 233 - .../pip/_internal/commands/debug.py | 237 - .../pip/_internal/commands/download.py | 142 - .../pip/_internal/commands/freeze.py | 99 - .../pip/_internal/commands/hash.py | 58 - .../pip/_internal/commands/help.py | 41 - .../pip/_internal/commands/install.py | 691 -- .../pip/_internal/commands/list.py | 299 - .../pip/_internal/commands/search.py | 146 - .../pip/_internal/commands/show.py | 180 - .../pip/_internal/commands/uninstall.py | 89 - .../pip/_internal/commands/wheel.py | 190 - .../pip/_internal/configuration.py | 426 - .../pip/_internal/distributions/__init__.py | 24 - .../pip/_internal/distributions/base.py | 45 - .../pip/_internal/distributions/installed.py | 24 - .../pip/_internal/distributions/sdist.py | 104 - .../pip/_internal/distributions/wheel.py | 36 - .../site-packages/pip/_internal/exceptions.py | 308 - .../pip/_internal/index/__init__.py | 2 - .../pip/_internal/index/collector.py | 661 -- .../pip/_internal/index/package_finder.py | 1016 -- .../site-packages/pip/_internal/locations.py | 194 - .../site-packages/pip/_internal/main.py | 16 - .../pip/_internal/models/__init__.py | 2 - .../pip/_internal/models/candidate.py | 36 - .../pip/_internal/models/direct_url.py | 245 - .../pip/_internal/models/format_control.py | 84 - .../pip/_internal/models/index.py | 31 - .../pip/_internal/models/link.py | 236 - .../pip/_internal/models/scheme.py | 25 - .../pip/_internal/models/search_scope.py | 133 - .../pip/_internal/models/selection_prefs.py | 47 - .../pip/_internal/models/target_python.py | 110 - .../pip/_internal/models/wheel.py | 78 - .../pip/_internal/network/__init__.py | 2 - .../pip/_internal/network/auth.py | 298 - .../pip/_internal/network/cache.py | 81 - .../pip/_internal/network/download.py | 200 - .../pip/_internal/network/session.py | 421 - .../pip/_internal/network/utils.py | 48 - .../pip/_internal/network/xmlrpc.py | 44 - .../pip/_internal/operations/__init__.py | 0 .../_internal/operations/build/__init__.py | 0 .../_internal/operations/build/metadata.py | 40 - .../operations/build/metadata_legacy.py | 77 - .../pip/_internal/operations/build/wheel.py | 46 - .../operations/build/wheel_legacy.py | 115 - .../pip/_internal/operations/check.py | 163 - .../pip/_internal/operations/freeze.py | 272 - .../_internal/operations/install/__init__.py | 2 - .../operations/install/editable_legacy.py | 52 - .../_internal/operations/install/legacy.py | 142 - .../pip/_internal/operations/install/wheel.py | 631 -- .../pip/_internal/operations/prepare.py | 568 -- .../site-packages/pip/_internal/pyproject.py | 196 - .../pip/_internal/req/__init__.py | 92 - .../pip/_internal/req/constructors.py | 464 - .../pip/_internal/req/req_file.py | 582 -- .../pip/_internal/req/req_install.py | 850 -- .../pip/_internal/req/req_set.py | 202 - .../pip/_internal/req/req_tracker.py | 151 - .../pip/_internal/req/req_uninstall.py | 649 -- .../pip/_internal/resolution/__init__.py | 0 .../pip/_internal/resolution/base.py | 20 - .../_internal/resolution/legacy/__init__.py | 0 .../_internal/resolution/legacy/resolver.py | 459 - .../resolution/resolvelib/__init__.py | 0 .../_internal/resolution/resolvelib/base.py | 52 - .../resolution/resolvelib/candidates.py | 450 - 
.../resolution/resolvelib/factory.py | 201 - .../resolution/resolvelib/provider.py | 54 - .../resolution/resolvelib/requirements.py | 119 - .../resolution/resolvelib/resolver.py | 174 - .../pip/_internal/self_outdated_check.py | 242 - .../pip/_internal/utils/__init__.py | 0 .../pip/_internal/utils/appdirs.py | 44 - .../pip/_internal/utils/compat.py | 270 - .../pip/_internal/utils/compatibility_tags.py | 169 - .../pip/_internal/utils/deprecation.py | 104 - .../pip/_internal/utils/direct_url_helpers.py | 130 - .../pip/_internal/utils/distutils_args.py | 48 - .../pip/_internal/utils/encoding.py | 42 - .../pip/_internal/utils/entrypoints.py | 31 - .../pip/_internal/utils/filesystem.py | 222 - .../pip/_internal/utils/filetypes.py | 16 - .../pip/_internal/utils/glibc.py | 98 - .../pip/_internal/utils/hashes.py | 133 - .../_internal/utils/inject_securetransport.py | 36 - .../pip/_internal/utils/logging.py | 399 - .../site-packages/pip/_internal/utils/misc.py | 913 -- .../pip/_internal/utils/models.py | 42 - .../pip/_internal/utils/packaging.py | 94 - .../pip/_internal/utils/pkg_resources.py | 44 - .../pip/_internal/utils/setuptools_build.py | 181 - .../pip/_internal/utils/subprocess.py | 277 - .../pip/_internal/utils/temp_dir.py | 271 - .../pip/_internal/utils/typing.py | 38 - .../pip/_internal/utils/unpacking.py | 272 - .../site-packages/pip/_internal/utils/urls.py | 55 - .../pip/_internal/utils/virtualenv.py | 116 - .../pip/_internal/utils/wheel.py | 225 - .../pip/_internal/vcs/__init__.py | 15 - .../site-packages/pip/_internal/vcs/bazaar.py | 120 - .../site-packages/pip/_internal/vcs/git.py | 394 - .../pip/_internal/vcs/mercurial.py | 161 - .../pip/_internal/vcs/subversion.py | 334 - .../pip/_internal/vcs/versioncontrol.py | 723 -- .../pip/_internal/wheel_builder.py | 309 - .../site-packages/pip/_vendor/__init__.py | 114 - .../site-packages/pip/_vendor/appdirs.py | 633 -- .../pip/_vendor/cachecontrol/__init__.py | 11 - .../pip/_vendor/cachecontrol/_cmd.py | 57 - .../pip/_vendor/cachecontrol/adapter.py | 133 - .../pip/_vendor/cachecontrol/cache.py | 39 - .../_vendor/cachecontrol/caches/__init__.py | 2 - .../_vendor/cachecontrol/caches/file_cache.py | 146 - .../cachecontrol/caches/redis_cache.py | 33 - .../pip/_vendor/cachecontrol/compat.py | 29 - .../pip/_vendor/cachecontrol/controller.py | 376 - .../pip/_vendor/cachecontrol/filewrapper.py | 80 - .../pip/_vendor/cachecontrol/heuristics.py | 135 - .../pip/_vendor/cachecontrol/serialize.py | 188 - .../pip/_vendor/cachecontrol/wrapper.py | 29 - .../pip/_vendor/certifi/__init__.py | 3 - .../pip/_vendor/certifi/__main__.py | 12 - .../pip/_vendor/certifi/cacert.pem | 4641 --------- .../site-packages/pip/_vendor/certifi/core.py | 30 - .../pip/_vendor/chardet/__init__.py | 39 - .../pip/_vendor/chardet/big5freq.py | 386 - .../pip/_vendor/chardet/big5prober.py | 47 - .../pip/_vendor/chardet/chardistribution.py | 233 - .../pip/_vendor/chardet/charsetgroupprober.py | 106 - .../pip/_vendor/chardet/charsetprober.py | 145 - .../pip/_vendor/chardet/cli/__init__.py | 1 - .../pip/_vendor/chardet/cli/chardetect.py | 85 - .../pip/_vendor/chardet/codingstatemachine.py | 88 - .../pip/_vendor/chardet/compat.py | 34 - .../pip/_vendor/chardet/cp949prober.py | 49 - .../pip/_vendor/chardet/enums.py | 76 - .../pip/_vendor/chardet/escprober.py | 101 - .../pip/_vendor/chardet/escsm.py | 246 - .../pip/_vendor/chardet/eucjpprober.py | 92 - .../pip/_vendor/chardet/euckrfreq.py | 195 - .../pip/_vendor/chardet/euckrprober.py | 47 - .../pip/_vendor/chardet/euctwfreq.py | 387 - 
.../pip/_vendor/chardet/euctwprober.py | 46 - .../pip/_vendor/chardet/gb2312freq.py | 283 - .../pip/_vendor/chardet/gb2312prober.py | 46 - .../pip/_vendor/chardet/hebrewprober.py | 292 - .../pip/_vendor/chardet/jisfreq.py | 325 - .../pip/_vendor/chardet/jpcntx.py | 233 - .../pip/_vendor/chardet/langbulgarianmodel.py | 228 - .../pip/_vendor/chardet/langcyrillicmodel.py | 333 - .../pip/_vendor/chardet/langgreekmodel.py | 225 - .../pip/_vendor/chardet/langhebrewmodel.py | 200 - .../pip/_vendor/chardet/langhungarianmodel.py | 225 - .../pip/_vendor/chardet/langthaimodel.py | 199 - .../pip/_vendor/chardet/langturkishmodel.py | 193 - .../pip/_vendor/chardet/latin1prober.py | 145 - .../pip/_vendor/chardet/mbcharsetprober.py | 91 - .../pip/_vendor/chardet/mbcsgroupprober.py | 54 - .../pip/_vendor/chardet/mbcssm.py | 572 -- .../pip/_vendor/chardet/sbcharsetprober.py | 132 - .../pip/_vendor/chardet/sbcsgroupprober.py | 73 - .../pip/_vendor/chardet/sjisprober.py | 92 - .../pip/_vendor/chardet/universaldetector.py | 286 - .../pip/_vendor/chardet/utf8prober.py | 82 - .../pip/_vendor/chardet/version.py | 9 - .../pip/_vendor/colorama/__init__.py | 6 - .../pip/_vendor/colorama/ansi.py | 102 - .../pip/_vendor/colorama/ansitowin32.py | 257 - .../pip/_vendor/colorama/initialise.py | 80 - .../pip/_vendor/colorama/win32.py | 152 - .../pip/_vendor/colorama/winterm.py | 169 - .../site-packages/pip/_vendor/contextlib2.py | 518 - .../pip/_vendor/distlib/__init__.py | 23 - .../pip/_vendor/distlib/_backport/__init__.py | 6 - .../pip/_vendor/distlib/_backport/misc.py | 41 - .../pip/_vendor/distlib/_backport/shutil.py | 761 -- .../_vendor/distlib/_backport/sysconfig.cfg | 84 - .../_vendor/distlib/_backport/sysconfig.py | 786 -- .../pip/_vendor/distlib/_backport/tarfile.py | 2607 ------ .../pip/_vendor/distlib/compat.py | 1120 --- .../pip/_vendor/distlib/database.py | 1339 --- .../pip/_vendor/distlib/index.py | 516 - .../pip/_vendor/distlib/locators.py | 1302 --- .../pip/_vendor/distlib/manifest.py | 393 - .../pip/_vendor/distlib/markers.py | 131 - .../pip/_vendor/distlib/metadata.py | 1096 --- .../pip/_vendor/distlib/resources.py | 355 - .../pip/_vendor/distlib/scripts.py | 416 - .../site-packages/pip/_vendor/distlib/t32.exe | Bin 96768 -> 0 bytes .../site-packages/pip/_vendor/distlib/t64.exe | Bin 105984 -> 0 bytes .../site-packages/pip/_vendor/distlib/util.py | 1761 ---- .../pip/_vendor/distlib/version.py | 736 -- .../site-packages/pip/_vendor/distlib/w32.exe | Bin 90112 -> 0 bytes .../site-packages/pip/_vendor/distlib/w64.exe | Bin 99840 -> 0 bytes .../pip/_vendor/distlib/wheel.py | 1004 -- .../site-packages/pip/_vendor/distro.py | 1230 --- .../pip/_vendor/html5lib/__init__.py | 35 - .../pip/_vendor/html5lib/_ihatexml.py | 288 - .../pip/_vendor/html5lib/_inputstream.py | 923 -- .../pip/_vendor/html5lib/_tokenizer.py | 1721 ---- .../pip/_vendor/html5lib/_trie/__init__.py | 14 - .../pip/_vendor/html5lib/_trie/_base.py | 40 - .../pip/_vendor/html5lib/_trie/datrie.py | 44 - .../pip/_vendor/html5lib/_trie/py.py | 67 - .../pip/_vendor/html5lib/_utils.py | 124 - .../pip/_vendor/html5lib/constants.py | 2947 ------ .../pip/_vendor/html5lib/filters/__init__.py | 0 .../filters/alphabeticalattributes.py | 29 - .../pip/_vendor/html5lib/filters/base.py | 12 - .../html5lib/filters/inject_meta_charset.py | 73 - .../pip/_vendor/html5lib/filters/lint.py | 93 - .../_vendor/html5lib/filters/optionaltags.py | 207 - .../pip/_vendor/html5lib/filters/sanitizer.py | 896 -- .../_vendor/html5lib/filters/whitespace.py | 38 - 
.../pip/_vendor/html5lib/html5parser.py | 2791 ------ .../pip/_vendor/html5lib/serializer.py | 409 - .../_vendor/html5lib/treeadapters/__init__.py | 30 - .../_vendor/html5lib/treeadapters/genshi.py | 54 - .../pip/_vendor/html5lib/treeadapters/sax.py | 50 - .../_vendor/html5lib/treebuilders/__init__.py | 88 - .../pip/_vendor/html5lib/treebuilders/base.py | 417 - .../pip/_vendor/html5lib/treebuilders/dom.py | 239 - .../_vendor/html5lib/treebuilders/etree.py | 340 - .../html5lib/treebuilders/etree_lxml.py | 366 - .../_vendor/html5lib/treewalkers/__init__.py | 154 - .../pip/_vendor/html5lib/treewalkers/base.py | 252 - .../pip/_vendor/html5lib/treewalkers/dom.py | 43 - .../pip/_vendor/html5lib/treewalkers/etree.py | 130 - .../html5lib/treewalkers/etree_lxml.py | 213 - .../_vendor/html5lib/treewalkers/genshi.py | 69 - .../pip/_vendor/idna/__init__.py | 2 - .../site-packages/pip/_vendor/idna/codec.py | 118 - .../site-packages/pip/_vendor/idna/compat.py | 12 - .../site-packages/pip/_vendor/idna/core.py | 398 - .../pip/_vendor/idna/idnadata.py | 1991 ---- .../pip/_vendor/idna/intranges.py | 53 - .../pip/_vendor/idna/package_data.py | 2 - .../pip/_vendor/idna/uts46data.py | 8317 ----------------- .../site-packages/pip/_vendor/ipaddress.py | 2420 ----- .../pip/_vendor/msgpack/__init__.py | 54 - .../pip/_vendor/msgpack/_version.py | 1 - .../pip/_vendor/msgpack/exceptions.py | 48 - .../site-packages/pip/_vendor/msgpack/ext.py | 191 - .../pip/_vendor/msgpack/fallback.py | 1063 --- .../pip/_vendor/packaging/__about__.py | 27 - .../pip/_vendor/packaging/__init__.py | 26 - .../pip/_vendor/packaging/_compat.py | 38 - .../pip/_vendor/packaging/_structures.py | 86 - .../pip/_vendor/packaging/_typing.py | 39 - .../pip/_vendor/packaging/markers.py | 328 - .../pip/_vendor/packaging/requirements.py | 145 - .../pip/_vendor/packaging/specifiers.py | 849 -- .../pip/_vendor/packaging/tags.py | 739 -- .../pip/_vendor/packaging/utils.py | 62 - .../pip/_vendor/packaging/version.py | 535 -- .../pip/_vendor/pep517/__init__.py | 4 - .../pip/_vendor/pep517/_in_process.py | 280 - .../site-packages/pip/_vendor/pep517/build.py | 124 - .../site-packages/pip/_vendor/pep517/check.py | 203 - .../pip/_vendor/pep517/colorlog.py | 115 - .../pip/_vendor/pep517/compat.py | 34 - .../pip/_vendor/pep517/dirtools.py | 44 - .../pip/_vendor/pep517/envbuild.py | 167 - .../site-packages/pip/_vendor/pep517/meta.py | 92 - .../pip/_vendor/pep517/wrappers.py | 308 - .../pip/_vendor/pkg_resources/__init__.py | 3296 ------- .../pip/_vendor/pkg_resources/py31compat.py | 23 - .../pip/_vendor/progress/__init__.py | 177 - .../site-packages/pip/_vendor/progress/bar.py | 91 - .../pip/_vendor/progress/counter.py | 41 - .../pip/_vendor/progress/spinner.py | 43 - .../site-packages/pip/_vendor/pyparsing.py | 7107 -------------- .../pip/_vendor/requests/__init__.py | 133 - .../pip/_vendor/requests/__version__.py | 14 - .../pip/_vendor/requests/_internal_utils.py | 42 - .../pip/_vendor/requests/adapters.py | 533 -- .../site-packages/pip/_vendor/requests/api.py | 161 - .../pip/_vendor/requests/auth.py | 305 - .../pip/_vendor/requests/certs.py | 18 - .../pip/_vendor/requests/compat.py | 76 - .../pip/_vendor/requests/cookies.py | 549 -- .../pip/_vendor/requests/exceptions.py | 126 - .../pip/_vendor/requests/help.py | 119 - .../pip/_vendor/requests/hooks.py | 34 - .../pip/_vendor/requests/models.py | 954 -- .../pip/_vendor/requests/packages.py | 16 - .../pip/_vendor/requests/sessions.py | 767 -- .../pip/_vendor/requests/status_codes.py | 123 - 
.../pip/_vendor/requests/structures.py | 105 - .../pip/_vendor/requests/utils.py | 982 -- .../pip/_vendor/resolvelib/__init__.py | 26 - .../pip/_vendor/resolvelib/providers.py | 121 - .../pip/_vendor/resolvelib/reporters.py | 36 - .../pip/_vendor/resolvelib/resolvers.py | 414 - .../pip/_vendor/resolvelib/structs.py | 68 - .../site-packages/pip/_vendor/retrying.py | 267 - .../site-packages/pip/_vendor/six.py | 980 -- .../site-packages/pip/_vendor/toml.py | 1039 -- .../pip/_vendor/toml/__init__.py | 21 - .../site-packages/pip/_vendor/toml/decoder.py | 945 -- .../site-packages/pip/_vendor/toml/encoder.py | 250 - .../site-packages/pip/_vendor/toml/ordered.py | 15 - .../site-packages/pip/_vendor/toml/tz.py | 21 - .../pip/_vendor/urllib3/__init__.py | 86 - .../pip/_vendor/urllib3/_collections.py | 336 - .../pip/_vendor/urllib3/connection.py | 414 - .../pip/_vendor/urllib3/connectionpool.py | 1051 --- .../pip/_vendor/urllib3/contrib/__init__.py | 0 .../urllib3/contrib/_appengine_environ.py | 36 - .../contrib/_securetransport/__init__.py | 0 .../contrib/_securetransport/bindings.py | 493 - .../contrib/_securetransport/low_level.py | 328 - .../pip/_vendor/urllib3/contrib/appengine.py | 314 - .../pip/_vendor/urllib3/contrib/ntlmpool.py | 121 - .../pip/_vendor/urllib3/contrib/pyopenssl.py | 498 - .../urllib3/contrib/securetransport.py | 859 -- .../pip/_vendor/urllib3/contrib/socks.py | 210 - .../pip/_vendor/urllib3/exceptions.py | 255 - .../pip/_vendor/urllib3/fields.py | 273 - .../pip/_vendor/urllib3/filepost.py | 98 - .../pip/_vendor/urllib3/packages/__init__.py | 5 - .../urllib3/packages/backports/__init__.py | 0 .../urllib3/packages/backports/makefile.py | 52 - .../pip/_vendor/urllib3/packages/six.py | 1021 -- .../packages/ssl_match_hostname/__init__.py | 19 - .../ssl_match_hostname/_implementation.py | 160 - .../pip/_vendor/urllib3/poolmanager.py | 470 - .../pip/_vendor/urllib3/request.py | 171 - .../pip/_vendor/urllib3/response.py | 809 -- .../pip/_vendor/urllib3/util/__init__.py | 46 - .../pip/_vendor/urllib3/util/connection.py | 138 - .../pip/_vendor/urllib3/util/queue.py | 21 - .../pip/_vendor/urllib3/util/request.py | 135 - .../pip/_vendor/urllib3/util/response.py | 86 - .../pip/_vendor/urllib3/util/retry.py | 450 - .../pip/_vendor/urllib3/util/ssl_.py | 407 - .../pip/_vendor/urllib3/util/timeout.py | 258 - .../pip/_vendor/urllib3/util/url.py | 430 - .../pip/_vendor/urllib3/util/wait.py | 153 - .../site-packages/pip/_vendor/vendor.txt | 24 - .../pip/_vendor/webencodings/__init__.py | 342 - .../pip/_vendor/webencodings/labels.py | 231 - .../pip/_vendor/webencodings/mklabels.py | 59 - .../pip/_vendor/webencodings/tests.py | 153 - .../_vendor/webencodings/x_user_defined.py | 325 - .../site-packages/pkg_resources/__init__.py | 3307 ------- .../pkg_resources/_vendor/__init__.py | 0 .../pkg_resources/_vendor/appdirs.py | 608 -- .../_vendor/packaging/__about__.py | 21 - .../_vendor/packaging/__init__.py | 14 - .../_vendor/packaging/_compat.py | 30 - .../_vendor/packaging/_structures.py | 68 - .../_vendor/packaging/markers.py | 301 - .../_vendor/packaging/requirements.py | 127 - .../_vendor/packaging/specifiers.py | 774 -- .../pkg_resources/_vendor/packaging/utils.py | 14 - .../_vendor/packaging/version.py | 393 - .../pkg_resources/_vendor/pyparsing.py | 5742 ------------ .../pkg_resources/_vendor/six.py | 868 -- .../pkg_resources/extern/__init__.py | 66 - .../site-packages/pkg_resources/py2_warn.py | 16 - .../site-packages/pkg_resources/py31compat.py | 23 - 
.../setuptools-47.1.0.dist-info/INSTALLER | 1 - .../setuptools-47.1.0.dist-info/LICENSE | 19 - .../setuptools-47.1.0.dist-info/METADATA | 109 - .../setuptools-47.1.0.dist-info/RECORD | 196 - .../setuptools-47.1.0.dist-info/WHEEL | 5 - .../dependency_links.txt | 2 - .../entry_points.txt | 68 - .../setuptools-47.1.0.dist-info/top_level.txt | 3 - .../setuptools-47.1.0.dist-info/zip-safe | 1 - .../site-packages/setuptools/__init__.py | 232 - .../setuptools/_deprecation_warning.py | 7 - .../site-packages/setuptools/_imp.py | 82 - .../setuptools/_vendor/__init__.py | 0 .../setuptools/_vendor/ordered_set.py | 488 - .../setuptools/_vendor/packaging/__about__.py | 27 - .../setuptools/_vendor/packaging/__init__.py | 26 - .../setuptools/_vendor/packaging/_compat.py | 31 - .../_vendor/packaging/_structures.py | 68 - .../setuptools/_vendor/packaging/markers.py | 296 - .../_vendor/packaging/requirements.py | 138 - .../_vendor/packaging/specifiers.py | 749 -- .../setuptools/_vendor/packaging/tags.py | 404 - .../setuptools/_vendor/packaging/utils.py | 57 - .../setuptools/_vendor/packaging/version.py | 420 - .../setuptools/_vendor/pyparsing.py | 5742 ------------ .../site-packages/setuptools/_vendor/six.py | 868 -- .../site-packages/setuptools/archive_util.py | 175 - .../site-packages/setuptools/build_meta.py | 272 - .../site-packages/setuptools/cli-32.exe | Bin 65536 -> 0 bytes .../site-packages/setuptools/cli-64.exe | Bin 74752 -> 0 bytes .../site-packages/setuptools/cli.exe | Bin 65536 -> 0 bytes .../setuptools/command/__init__.py | 17 - .../site-packages/setuptools/command/alias.py | 80 - .../setuptools/command/bdist_egg.py | 509 - .../setuptools/command/bdist_rpm.py | 43 - .../setuptools/command/bdist_wininst.py | 30 - .../setuptools/command/build_clib.py | 101 - .../setuptools/command/build_ext.py | 330 - .../setuptools/command/build_py.py | 276 - .../setuptools/command/develop.py | 221 - .../setuptools/command/dist_info.py | 36 - .../setuptools/command/easy_install.py | 2354 ----- .../setuptools/command/egg_info.py | 721 -- .../setuptools/command/install.py | 125 - .../setuptools/command/install_egg_info.py | 62 - .../setuptools/command/install_lib.py | 122 - .../setuptools/command/install_scripts.py | 68 - .../setuptools/command/launcher manifest.xml | 15 - .../setuptools/command/py36compat.py | 136 - .../setuptools/command/register.py | 18 - .../setuptools/command/rotate.py | 66 - .../setuptools/command/saveopts.py | 22 - .../site-packages/setuptools/command/sdist.py | 252 - .../setuptools/command/setopt.py | 149 - .../site-packages/setuptools/command/test.py | 280 - .../setuptools/command/upload.py | 17 - .../setuptools/command/upload_docs.py | 206 - .../site-packages/setuptools/config.py | 700 -- .../site-packages/setuptools/dep_util.py | 25 - .../site-packages/setuptools/depends.py | 176 - .../site-packages/setuptools/dist.py | 1031 -- .../site-packages/setuptools/errors.py | 16 - .../site-packages/setuptools/extension.py | 57 - .../setuptools/extern/__init__.py | 66 - .../site-packages/setuptools/glob.py | 174 - .../site-packages/setuptools/gui-32.exe | Bin 65536 -> 0 bytes .../site-packages/setuptools/gui-64.exe | Bin 75264 -> 0 bytes .../site-packages/setuptools/gui.exe | Bin 65536 -> 0 bytes .../site-packages/setuptools/installer.py | 150 - .../site-packages/setuptools/launch.py | 35 - .../site-packages/setuptools/lib2to3_ex.py | 71 - .../site-packages/setuptools/monkey.py | 179 - .../site-packages/setuptools/msvc.py | 1825 ---- .../site-packages/setuptools/namespaces.py | 111 - 
.../site-packages/setuptools/package_index.py | 1140 --- .../site-packages/setuptools/py27compat.py | 60 - .../site-packages/setuptools/py31compat.py | 32 - .../site-packages/setuptools/py33compat.py | 59 - .../site-packages/setuptools/py34compat.py | 13 - .../site-packages/setuptools/sandbox.py | 492 - .../setuptools/script (dev).tmpl | 6 - .../site-packages/setuptools/script.tmpl | 3 - .../site-packages/setuptools/site-patch.py | 76 - .../site-packages/setuptools/ssl_support.py | 265 - .../site-packages/setuptools/unicode_utils.py | 44 - .../site-packages/setuptools/version.py | 6 - .../site-packages/setuptools/wheel.py | 217 - .../setuptools/windows_support.py | 29 - .../site-packages/werkzeug/__init__.py | 6 - .../site-packages/werkzeug/_internal.py | 626 -- .../site-packages/werkzeug/_reloader.py | 430 - .../site-packages/werkzeug/datastructures.py | 3059 ------ .../site-packages/werkzeug/datastructures.pyi | 912 -- .../site-packages/werkzeug/debug/__init__.py | 502 - .../site-packages/werkzeug/debug/console.py | 211 - .../site-packages/werkzeug/debug/repr.py | 284 - .../werkzeug/debug/shared/FONT_LICENSE | 96 - .../werkzeug/debug/shared/ICON_LICENSE.md | 6 - .../werkzeug/debug/shared/console.png | Bin 507 -> 0 bytes .../werkzeug/debug/shared/debugger.js | 359 - .../werkzeug/debug/shared/less.png | Bin 191 -> 0 bytes .../werkzeug/debug/shared/more.png | Bin 200 -> 0 bytes .../werkzeug/debug/shared/source.png | Bin 818 -> 0 bytes .../werkzeug/debug/shared/style.css | 163 - .../werkzeug/debug/shared/ubuntu.ttf | Bin 70220 -> 0 bytes .../site-packages/werkzeug/debug/tbtools.py | 600 -- .../site-packages/werkzeug/exceptions.py | 943 -- .../site-packages/werkzeug/filesystem.py | 55 - .../site-packages/werkzeug/formparser.py | 495 - .../python3.8/site-packages/werkzeug/http.py | 1388 --- .../python3.8/site-packages/werkzeug/local.py | 677 -- .../werkzeug/middleware/__init__.py | 22 - .../werkzeug/middleware/dispatcher.py | 78 - .../werkzeug/middleware/http_proxy.py | 230 - .../site-packages/werkzeug/middleware/lint.py | 420 - .../werkzeug/middleware/profiler.py | 139 - .../werkzeug/middleware/proxy_fix.py | 187 - .../werkzeug/middleware/shared_data.py | 320 - .../python3.8/site-packages/werkzeug/py.typed | 0 .../site-packages/werkzeug/routing.py | 2341 ----- .../site-packages/werkzeug/sansio/__init__.py | 0 .../werkzeug/sansio/multipart.py | 260 - .../site-packages/werkzeug/sansio/request.py | 548 -- .../site-packages/werkzeug/sansio/response.py | 704 -- .../site-packages/werkzeug/sansio/utils.py | 142 - .../site-packages/werkzeug/security.py | 247 - .../site-packages/werkzeug/serving.py | 1081 --- .../python3.8/site-packages/werkzeug/test.py | 1326 --- .../site-packages/werkzeug/testapp.py | 240 - .../python3.8/site-packages/werkzeug/urls.py | 1211 --- .../site-packages/werkzeug/user_agent.py | 47 - .../site-packages/werkzeug/useragents.py | 215 - .../python3.8/site-packages/werkzeug/utils.py | 1099 --- .../werkzeug/wrappers/__init__.py | 16 - .../site-packages/werkzeug/wrappers/accept.py | 14 - .../site-packages/werkzeug/wrappers/auth.py | 26 - .../werkzeug/wrappers/base_request.py | 36 - .../werkzeug/wrappers/base_response.py | 36 - .../werkzeug/wrappers/common_descriptors.py | 26 - .../site-packages/werkzeug/wrappers/cors.py | 26 - .../site-packages/werkzeug/wrappers/etag.py | 26 - .../site-packages/werkzeug/wrappers/json.py | 13 - .../werkzeug/wrappers/request.py | 660 -- .../werkzeug/wrappers/response.py | 890 -- .../werkzeug/wrappers/user_agent.py | 14 - 
.../python3.8/site-packages/werkzeug/wsgi.py | 982 -- venv/pyvenv.cfg | 3 - 716 files changed, 224642 deletions(-) delete mode 100644 venv/bin/Activate.ps1 delete mode 100644 venv/bin/activate delete mode 100644 venv/bin/activate.csh delete mode 100644 venv/bin/activate.fish delete mode 100755 venv/bin/easy_install delete mode 100755 venv/bin/easy_install-3.8 delete mode 100755 venv/bin/flask delete mode 100755 venv/bin/gunicorn delete mode 100755 venv/bin/pip delete mode 100755 venv/bin/pip3 delete mode 100755 venv/bin/pip3.8 delete mode 120000 venv/bin/python delete mode 120000 venv/bin/python3 delete mode 100644 venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/INSTALLER delete mode 100644 venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/LICENSE.rst delete mode 100644 venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/METADATA delete mode 100644 venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/RECORD delete mode 100644 venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/WHEEL delete mode 100644 venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/entry_points.txt delete mode 100644 venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/top_level.txt delete mode 100644 venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/INSTALLER delete mode 100644 venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/LICENSE.rst delete mode 100644 venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/METADATA delete mode 100644 venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/RECORD delete mode 100644 venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/WHEEL delete mode 100644 venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/entry_points.txt delete mode 100644 venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/top_level.txt delete mode 100644 venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/INSTALLER delete mode 100644 venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/LICENSE.rst delete mode 100644 venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/METADATA delete mode 100644 venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/RECORD delete mode 100644 venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/WHEEL delete mode 100644 venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/top_level.txt delete mode 100644 venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/INSTALLER delete mode 100644 venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/LICENSE.rst delete mode 100644 venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/METADATA delete mode 100644 venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/RECORD delete mode 100644 venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/WHEEL delete mode 100644 venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/top_level.txt delete mode 100644 venv/lib/python3.8/site-packages/click-8.0.3.dist-info/INSTALLER delete mode 100644 venv/lib/python3.8/site-packages/click-8.0.3.dist-info/LICENSE.rst delete mode 100644 venv/lib/python3.8/site-packages/click-8.0.3.dist-info/METADATA delete mode 100644 venv/lib/python3.8/site-packages/click-8.0.3.dist-info/RECORD delete mode 100644 venv/lib/python3.8/site-packages/click-8.0.3.dist-info/WHEEL delete mode 100644 venv/lib/python3.8/site-packages/click-8.0.3.dist-info/top_level.txt delete mode 100644 venv/lib/python3.8/site-packages/click/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/click/_compat.py delete mode 100644 
venv/lib/python3.8/site-packages/click/_termui_impl.py delete mode 100644 venv/lib/python3.8/site-packages/click/_textwrap.py delete mode 100644 venv/lib/python3.8/site-packages/click/_unicodefun.py delete mode 100644 venv/lib/python3.8/site-packages/click/_winconsole.py delete mode 100644 venv/lib/python3.8/site-packages/click/core.py delete mode 100644 venv/lib/python3.8/site-packages/click/decorators.py delete mode 100644 venv/lib/python3.8/site-packages/click/exceptions.py delete mode 100644 venv/lib/python3.8/site-packages/click/formatting.py delete mode 100644 venv/lib/python3.8/site-packages/click/globals.py delete mode 100644 venv/lib/python3.8/site-packages/click/parser.py delete mode 100644 venv/lib/python3.8/site-packages/click/py.typed delete mode 100644 venv/lib/python3.8/site-packages/click/shell_completion.py delete mode 100644 venv/lib/python3.8/site-packages/click/termui.py delete mode 100644 venv/lib/python3.8/site-packages/click/testing.py delete mode 100644 venv/lib/python3.8/site-packages/click/types.py delete mode 100644 venv/lib/python3.8/site-packages/click/utils.py delete mode 100644 venv/lib/python3.8/site-packages/easy_install.py delete mode 100644 venv/lib/python3.8/site-packages/flask/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/flask/__main__.py delete mode 100644 venv/lib/python3.8/site-packages/flask/app.py delete mode 100644 venv/lib/python3.8/site-packages/flask/blueprints.py delete mode 100644 venv/lib/python3.8/site-packages/flask/cli.py delete mode 100644 venv/lib/python3.8/site-packages/flask/config.py delete mode 100644 venv/lib/python3.8/site-packages/flask/ctx.py delete mode 100644 venv/lib/python3.8/site-packages/flask/debughelpers.py delete mode 100644 venv/lib/python3.8/site-packages/flask/globals.py delete mode 100644 venv/lib/python3.8/site-packages/flask/helpers.py delete mode 100644 venv/lib/python3.8/site-packages/flask/json/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/flask/json/tag.py delete mode 100644 venv/lib/python3.8/site-packages/flask/logging.py delete mode 100644 venv/lib/python3.8/site-packages/flask/py.typed delete mode 100644 venv/lib/python3.8/site-packages/flask/scaffold.py delete mode 100644 venv/lib/python3.8/site-packages/flask/sessions.py delete mode 100644 venv/lib/python3.8/site-packages/flask/signals.py delete mode 100644 venv/lib/python3.8/site-packages/flask/templating.py delete mode 100644 venv/lib/python3.8/site-packages/flask/testing.py delete mode 100644 venv/lib/python3.8/site-packages/flask/typing.py delete mode 100644 venv/lib/python3.8/site-packages/flask/views.py delete mode 100644 venv/lib/python3.8/site-packages/flask/wrappers.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn-20.1.0.dist-info/INSTALLER delete mode 100644 venv/lib/python3.8/site-packages/gunicorn-20.1.0.dist-info/LICENSE delete mode 100644 venv/lib/python3.8/site-packages/gunicorn-20.1.0.dist-info/METADATA delete mode 100644 venv/lib/python3.8/site-packages/gunicorn-20.1.0.dist-info/RECORD delete mode 100644 venv/lib/python3.8/site-packages/gunicorn-20.1.0.dist-info/WHEEL delete mode 100644 venv/lib/python3.8/site-packages/gunicorn-20.1.0.dist-info/entry_points.txt delete mode 100644 venv/lib/python3.8/site-packages/gunicorn-20.1.0.dist-info/top_level.txt delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/__main__.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/app/__init__.py delete mode 
100644 venv/lib/python3.8/site-packages/gunicorn/app/base.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/app/pasterapp.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/app/wsgiapp.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/arbiter.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/config.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/debug.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/errors.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/glogging.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/http/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/http/body.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/http/errors.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/http/message.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/http/parser.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/http/unreader.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/http/wsgi.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/instrument/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/instrument/statsd.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/pidfile.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/reloader.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/sock.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/systemd.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/util.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/base.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/base_async.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/geventlet.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/ggevent.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/gthread.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/gtornado.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/sync.py delete mode 100644 venv/lib/python3.8/site-packages/gunicorn/workers/workertmp.py delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous-2.0.1.dist-info/INSTALLER delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous-2.0.1.dist-info/LICENSE.rst delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous-2.0.1.dist-info/METADATA delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous-2.0.1.dist-info/RECORD delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous-2.0.1.dist-info/WHEEL delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous-2.0.1.dist-info/top_level.txt delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous/_json.py delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous/encoding.py delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous/exc.py delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous/jws.py delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous/py.typed delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous/serializer.py delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous/signer.py delete mode 100644 
venv/lib/python3.8/site-packages/itsdangerous/timed.py delete mode 100644 venv/lib/python3.8/site-packages/itsdangerous/url_safe.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/_identifier.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/async_utils.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/bccache.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/compiler.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/constants.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/debug.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/defaults.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/environment.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/exceptions.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/ext.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/filters.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/idtracking.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/lexer.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/loaders.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/meta.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/nativetypes.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/nodes.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/optimizer.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/parser.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/py.typed delete mode 100644 venv/lib/python3.8/site-packages/jinja2/runtime.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/sandbox.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/tests.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/utils.py delete mode 100644 venv/lib/python3.8/site-packages/jinja2/visitor.py delete mode 100644 venv/lib/python3.8/site-packages/markupsafe/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/markupsafe/_native.py delete mode 100644 venv/lib/python3.8/site-packages/markupsafe/_speedups.c delete mode 100755 venv/lib/python3.8/site-packages/markupsafe/_speedups.cpython-38-darwin.so delete mode 100644 venv/lib/python3.8/site-packages/markupsafe/_speedups.pyi delete mode 100644 venv/lib/python3.8/site-packages/markupsafe/py.typed delete mode 100644 venv/lib/python3.8/site-packages/pip-20.1.1.dist-info/INSTALLER delete mode 100644 venv/lib/python3.8/site-packages/pip-20.1.1.dist-info/LICENSE.txt delete mode 100644 venv/lib/python3.8/site-packages/pip-20.1.1.dist-info/METADATA delete mode 100644 venv/lib/python3.8/site-packages/pip-20.1.1.dist-info/RECORD delete mode 100644 venv/lib/python3.8/site-packages/pip-20.1.1.dist-info/WHEEL delete mode 100644 venv/lib/python3.8/site-packages/pip-20.1.1.dist-info/entry_points.txt delete mode 100644 venv/lib/python3.8/site-packages/pip-20.1.1.dist-info/top_level.txt delete mode 100644 venv/lib/python3.8/site-packages/pip/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/__main__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/build_env.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cache.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/autocompletion.py 
delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/base_command.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/cmdoptions.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/command_context.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/main.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/main_parser.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/parser.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/progress_bars.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/req_command.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/spinners.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/cli/status_codes.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/cache.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/check.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/completion.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/configuration.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/debug.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/download.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/freeze.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/hash.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/help.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/install.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/list.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/search.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/show.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/uninstall.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/commands/wheel.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/configuration.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/distributions/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/distributions/base.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/distributions/installed.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/distributions/sdist.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/distributions/wheel.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/exceptions.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/index/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/index/collector.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/index/package_finder.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/locations.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/main.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/candidate.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/direct_url.py delete mode 100644 
venv/lib/python3.8/site-packages/pip/_internal/models/format_control.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/index.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/link.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/scheme.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/search_scope.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/selection_prefs.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/target_python.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/models/wheel.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/network/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/network/auth.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/network/cache.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/network/download.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/network/session.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/network/utils.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/network/xmlrpc.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/build/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/build/metadata.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/build/metadata_legacy.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/build/wheel.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/build/wheel_legacy.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/check.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/freeze.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/install/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/install/editable_legacy.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/install/legacy.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/install/wheel.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/operations/prepare.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/pyproject.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/req/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/req/constructors.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/req/req_file.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/req/req_install.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/req/req_set.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/req/req_tracker.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/req/req_uninstall.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/base.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/legacy/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/legacy/resolver.py delete mode 100644 
venv/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/base.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/candidates.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/factory.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/provider.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/requirements.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/resolver.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/self_outdated_check.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/appdirs.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/compat.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/compatibility_tags.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/deprecation.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/direct_url_helpers.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/distutils_args.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/encoding.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/entrypoints.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/filesystem.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/filetypes.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/glibc.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/hashes.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/inject_securetransport.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/logging.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/misc.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/models.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/packaging.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/pkg_resources.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/setuptools_build.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/subprocess.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/temp_dir.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/typing.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/unpacking.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/urls.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/virtualenv.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/utils/wheel.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/vcs/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/vcs/bazaar.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/vcs/git.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/vcs/mercurial.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/vcs/subversion.py delete mode 100644 
venv/lib/python3.8/site-packages/pip/_internal/vcs/versioncontrol.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_internal/wheel_builder.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/appdirs.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/_cmd.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/adapter.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/cache.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/caches/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/compat.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/controller.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/filewrapper.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/heuristics.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/serialize.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/cachecontrol/wrapper.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/certifi/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/certifi/__main__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/certifi/cacert.pem delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/certifi/core.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/big5freq.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/big5prober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/chardistribution.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/charsetgroupprober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/charsetprober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/cli/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/cli/chardetect.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/codingstatemachine.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/compat.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/cp949prober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/enums.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/escprober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/escsm.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/eucjpprober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/euckrfreq.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/euckrprober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/euctwfreq.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/euctwprober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/gb2312freq.py delete 
mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/gb2312prober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/hebrewprober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/jisfreq.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/jpcntx.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/langbulgarianmodel.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/langcyrillicmodel.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/langgreekmodel.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/langhebrewmodel.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/langhungarianmodel.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/langthaimodel.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/langturkishmodel.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/latin1prober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/mbcharsetprober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/mbcsgroupprober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/mbcssm.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/sbcharsetprober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/sbcsgroupprober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/sjisprober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/universaldetector.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/utf8prober.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/chardet/version.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/colorama/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/colorama/ansi.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/colorama/ansitowin32.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/colorama/initialise.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/colorama/win32.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/colorama/winterm.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/contextlib2.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/misc.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/shutil.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/sysconfig.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/tarfile.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/compat.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/database.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/index.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/locators.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/manifest.py delete mode 100644 
venv/lib/python3.8/site-packages/pip/_vendor/distlib/markers.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/metadata.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/resources.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/scripts.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/t32.exe delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/t64.exe delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/util.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/version.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/w32.exe delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/w64.exe delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distlib/wheel.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/distro.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_ihatexml.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_inputstream.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_tokenizer.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/_base.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/datrie.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_trie/py.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/_utils.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/constants.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/base.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/lint.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/optionaltags.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/sanitizer.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/whitespace.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/html5parser.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/serializer.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treeadapters/sax.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treebuilders/base.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treebuilders/dom.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treebuilders/etree.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py delete mode 100644 
venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treewalkers/base.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treewalkers/dom.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treewalkers/etree.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/codec.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/compat.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/core.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/idnadata.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/intranges.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/package_data.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/idna/uts46data.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/ipaddress.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/msgpack/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/msgpack/_version.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/msgpack/exceptions.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/msgpack/ext.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/msgpack/fallback.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/__about__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/_compat.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/_structures.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/_typing.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/markers.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/requirements.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/specifiers.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/tags.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/utils.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/packaging/version.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/_in_process.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/build.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/check.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/colorlog.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/compat.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/dirtools.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/envbuild.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/meta.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pep517/wrappers.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pkg_resources/__init__.py delete 
mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pkg_resources/py31compat.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/progress/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/progress/bar.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/progress/counter.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/progress/spinner.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/pyparsing.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/__version__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/_internal_utils.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/adapters.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/api.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/auth.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/certs.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/compat.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/cookies.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/exceptions.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/help.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/hooks.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/models.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/packages.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/sessions.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/status_codes.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/structures.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/requests/utils.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/resolvelib/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/resolvelib/providers.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/resolvelib/reporters.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/resolvelib/resolvers.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/resolvelib/structs.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/retrying.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/six.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/toml.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/toml/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/toml/decoder.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/toml/encoder.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/toml/ordered.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/toml/tz.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/_collections.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/connection.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/connectionpool.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/__init__.py delete mode 100644 
venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/appengine.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/securetransport.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/socks.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/exceptions.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/fields.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/filepost.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/packages/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/packages/six.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/poolmanager.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/request.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/response.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/connection.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/queue.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/request.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/response.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/retry.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/ssl_.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/timeout.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/url.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/urllib3/util/wait.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/vendor.txt delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/webencodings/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/webencodings/labels.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/webencodings/mklabels.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/webencodings/tests.py delete mode 100644 venv/lib/python3.8/site-packages/pip/_vendor/webencodings/x_user_defined.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/__init__.py delete mode 100644 
venv/lib/python3.8/site-packages/pkg_resources/_vendor/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/appdirs.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__about__.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_compat.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_structures.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/markers.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/requirements.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/specifiers.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/utils.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/version.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/pyparsing.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/_vendor/six.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/extern/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/py2_warn.py delete mode 100644 venv/lib/python3.8/site-packages/pkg_resources/py31compat.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/INSTALLER delete mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/LICENSE delete mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/METADATA delete mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/RECORD delete mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/WHEEL delete mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/dependency_links.txt delete mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/entry_points.txt delete mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/top_level.txt delete mode 100644 venv/lib/python3.8/site-packages/setuptools-47.1.0.dist-info/zip-safe delete mode 100644 venv/lib/python3.8/site-packages/setuptools/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_deprecation_warning.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_imp.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/ordered_set.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/__about__.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/_compat.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/_structures.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/markers.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/requirements.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/specifiers.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/tags.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/utils.py delete mode 100644 
venv/lib/python3.8/site-packages/setuptools/_vendor/packaging/version.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/pyparsing.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/_vendor/six.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/archive_util.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/build_meta.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/cli-32.exe delete mode 100644 venv/lib/python3.8/site-packages/setuptools/cli-64.exe delete mode 100644 venv/lib/python3.8/site-packages/setuptools/cli.exe delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/alias.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/bdist_egg.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/bdist_rpm.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/bdist_wininst.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/build_clib.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/build_ext.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/build_py.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/develop.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/dist_info.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/easy_install.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/egg_info.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/install.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/install_egg_info.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/install_lib.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/install_scripts.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/launcher manifest.xml delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/py36compat.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/register.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/rotate.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/saveopts.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/sdist.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/setopt.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/test.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/upload.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/command/upload_docs.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/config.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/dep_util.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/depends.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/dist.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/errors.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/extension.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/extern/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/glob.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/gui-32.exe delete mode 100644 venv/lib/python3.8/site-packages/setuptools/gui-64.exe delete mode 
100644 venv/lib/python3.8/site-packages/setuptools/gui.exe delete mode 100644 venv/lib/python3.8/site-packages/setuptools/installer.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/launch.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/lib2to3_ex.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/monkey.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/msvc.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/namespaces.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/package_index.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/py27compat.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/py31compat.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/py33compat.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/py34compat.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/sandbox.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/script (dev).tmpl delete mode 100644 venv/lib/python3.8/site-packages/setuptools/script.tmpl delete mode 100644 venv/lib/python3.8/site-packages/setuptools/site-patch.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/ssl_support.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/unicode_utils.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/version.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/wheel.py delete mode 100644 venv/lib/python3.8/site-packages/setuptools/windows_support.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/_internal.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/_reloader.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/datastructures.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/datastructures.pyi delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/console.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/repr.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/FONT_LICENSE delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/ICON_LICENSE.md delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/console.png delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/debugger.js delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/less.png delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/more.png delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/source.png delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/style.css delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/shared/ubuntu.ttf delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/debug/tbtools.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/exceptions.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/filesystem.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/formparser.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/http.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/local.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/middleware/__init__.py delete mode 100644 
venv/lib/python3.8/site-packages/werkzeug/middleware/dispatcher.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/middleware/http_proxy.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/middleware/lint.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/middleware/profiler.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/middleware/proxy_fix.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/middleware/shared_data.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/py.typed delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/routing.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/sansio/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/sansio/multipart.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/sansio/request.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/sansio/response.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/sansio/utils.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/security.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/serving.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/test.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/testapp.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/urls.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/user_agent.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/useragents.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/utils.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/__init__.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/accept.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/auth.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/base_request.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/base_response.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/common_descriptors.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/cors.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/etag.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/json.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/request.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/response.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/wrappers/user_agent.py delete mode 100644 venv/lib/python3.8/site-packages/werkzeug/wsgi.py delete mode 100644 venv/pyvenv.cfg diff --git a/venv/bin/Activate.ps1 b/venv/bin/Activate.ps1 deleted file mode 100644 index 2fb3852..0000000 --- a/venv/bin/Activate.ps1 +++ /dev/null @@ -1,241 +0,0 @@ -<# -.Synopsis -Activate a Python virtual environment for the current PowerShell session. - -.Description -Pushes the python executable for a virtual environment to the front of the -$Env:PATH environment variable and sets the prompt to signify that you are -in a Python virtual environment. Makes use of the command line switches as -well as the `pyvenv.cfg` file values present in the virtual environment. - -.Parameter VenvDir -Path to the directory that contains the virtual environment to activate. The -default value for this is the parent of the directory that the Activate.ps1 -script is located within. 
- -.Parameter Prompt -The prompt prefix to display when this virtual environment is activated. By -default, this prompt is the name of the virtual environment folder (VenvDir) -surrounded by parentheses and followed by a single space (ie. '(.venv) '). - -.Example -Activate.ps1 -Activates the Python virtual environment that contains the Activate.ps1 script. - -.Example -Activate.ps1 -Verbose -Activates the Python virtual environment that contains the Activate.ps1 script, -and shows extra information about the activation as it executes. - -.Example -Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv -Activates the Python virtual environment located in the specified location. - -.Example -Activate.ps1 -Prompt "MyPython" -Activates the Python virtual environment that contains the Activate.ps1 script, -and prefixes the current prompt with the specified string (surrounded in -parentheses) while the virtual environment is active. - -.Notes -On Windows, it may be required to enable this Activate.ps1 script by setting the -execution policy for the user. You can do this by issuing the following PowerShell -command: - -PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - -For more information on Execution Policies: -https://go.microsoft.com/fwlink/?LinkID=135170 - -#> -Param( - [Parameter(Mandatory = $false)] - [String] - $VenvDir, - [Parameter(Mandatory = $false)] - [String] - $Prompt -) - -<# Function declarations --------------------------------------------------- #> - -<# -.Synopsis -Remove all shell session elements added by the Activate script, including the -addition of the virtual environment's Python executable from the beginning of -the PATH variable. - -.Parameter NonDestructive -If present, do not remove this function from the global namespace for the -session. - -#> -function global:deactivate ([switch]$NonDestructive) { - # Revert to original values - - # The prior prompt: - if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { - Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt - Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT - } - - # The prior PYTHONHOME: - if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { - Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME - Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME - } - - # The prior PATH: - if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { - Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH - Remove-Item -Path Env:_OLD_VIRTUAL_PATH - } - - # Just remove the VIRTUAL_ENV altogether: - if (Test-Path -Path Env:VIRTUAL_ENV) { - Remove-Item -Path env:VIRTUAL_ENV - } - - # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: - if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { - Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force - } - - # Leave deactivate function in the global namespace if requested: - if (-not $NonDestructive) { - Remove-Item -Path function:deactivate - } -} - -<# -.Description -Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the -given folder, and returns them in a map. - -For each line in the pyvenv.cfg file, if that line can be parsed into exactly -two strings separated by `=` (with any amount of whitespace surrounding the =) -then it is considered a `key = value` line. The left hand string is the key, -the right hand is the value. - -If the value starts with a `'` or a `"` then the first and last character is -stripped from the value before being captured. 
- -.Parameter ConfigDir -Path to the directory that contains the `pyvenv.cfg` file. -#> -function Get-PyVenvConfig( - [String] - $ConfigDir -) { - Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" - - # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). - $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue - - # An empty map will be returned if no config file is found. - $pyvenvConfig = @{ } - - if ($pyvenvConfigPath) { - - Write-Verbose "File exists, parse `key = value` lines" - $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath - - $pyvenvConfigContent | ForEach-Object { - $keyval = $PSItem -split "\s*=\s*", 2 - if ($keyval[0] -and $keyval[1]) { - $val = $keyval[1] - - # Remove extraneous quotations around a string value. - if ("'""".Contains($val.Substring(0, 1))) { - $val = $val.Substring(1, $val.Length - 2) - } - - $pyvenvConfig[$keyval[0]] = $val - Write-Verbose "Adding Key: '$($keyval[0])'='$val'" - } - } - } - return $pyvenvConfig -} - - -<# Begin Activate script --------------------------------------------------- #> - -# Determine the containing directory of this script -$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition -$VenvExecDir = Get-Item -Path $VenvExecPath - -Write-Verbose "Activation script is located in path: '$VenvExecPath'" -Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" -Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" - -# Set values required in priority: CmdLine, ConfigFile, Default -# First, get the location of the virtual environment, it might not be -# VenvExecDir if specified on the command line. -if ($VenvDir) { - Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" -} -else { - Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." - $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") - Write-Verbose "VenvDir=$VenvDir" -} - -# Next, read the `pyvenv.cfg` file to determine any required value such -# as `prompt`. -$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir - -# Next, set the prompt from the command line, or the config file, or -# just use the name of the virtual environment folder. -if ($Prompt) { - Write-Verbose "Prompt specified as argument, using '$Prompt'" -} -else { - Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" - if ($pyvenvCfg -and $pyvenvCfg['prompt']) { - Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" - $Prompt = $pyvenvCfg['prompt']; - } - else { - Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virutal environment)" - Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" - $Prompt = Split-Path -Path $venvDir -Leaf - } -} - -Write-Verbose "Prompt = '$Prompt'" -Write-Verbose "VenvDir='$VenvDir'" - -# Deactivate any currently active virtual environment, but leave the -# deactivate function in place. -deactivate -nondestructive - -# Now set the environment variable VIRTUAL_ENV, used by many tools to determine -# that there is an activated venv. 
-$env:VIRTUAL_ENV = $VenvDir - -if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { - - Write-Verbose "Setting prompt to '$Prompt'" - - # Set the prompt to include the env name - # Make sure _OLD_VIRTUAL_PROMPT is global - function global:_OLD_VIRTUAL_PROMPT { "" } - Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT - New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt - - function global:prompt { - Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " - _OLD_VIRTUAL_PROMPT - } -} - -# Clear PYTHONHOME -if (Test-Path -Path Env:PYTHONHOME) { - Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME - Remove-Item -Path Env:PYTHONHOME -} - -# Add the venv to the PATH -Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH -$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/venv/bin/activate b/venv/bin/activate deleted file mode 100644 index 8ef985e..0000000 --- a/venv/bin/activate +++ /dev/null @@ -1,76 +0,0 @@ -# This file must be used with "source bin/activate" *from bash* -# you cannot run it directly - -deactivate () { - # reset old environment variables - if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then - PATH="${_OLD_VIRTUAL_PATH:-}" - export PATH - unset _OLD_VIRTUAL_PATH - fi - if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then - PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" - export PYTHONHOME - unset _OLD_VIRTUAL_PYTHONHOME - fi - - # This should detect bash and zsh, which have a hash command that must - # be called to get it to forget past commands. Without forgetting - # past commands the $PATH changes we made may not be respected - if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then - hash -r - fi - - if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then - PS1="${_OLD_VIRTUAL_PS1:-}" - export PS1 - unset _OLD_VIRTUAL_PS1 - fi - - unset VIRTUAL_ENV - if [ ! "${1:-}" = "nondestructive" ] ; then - # Self destruct! - unset -f deactivate - fi -} - -# unset irrelevant variables -deactivate nondestructive - -VIRTUAL_ENV="/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv" -export VIRTUAL_ENV - -_OLD_VIRTUAL_PATH="$PATH" -PATH="$VIRTUAL_ENV/bin:$PATH" -export PATH - -# unset PYTHONHOME if set -# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) -# could use `if (set -u; : $PYTHONHOME) ;` in bash -if [ -n "${PYTHONHOME:-}" ] ; then - _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" - unset PYTHONHOME -fi - -if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then - _OLD_VIRTUAL_PS1="${PS1:-}" - if [ "x(venv) " != x ] ; then - PS1="(venv) ${PS1:-}" - else - if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then - # special case for Aspen magic directories - # see http://www.zetadev.com/software/aspen/ - PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1" - else - PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1" - fi - fi - export PS1 -fi - -# This should detect bash and zsh, which have a hash command that must -# be called to get it to forget past commands. Without forgetting -# past commands the $PATH changes we made may not be respected -if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then - hash -r -fi diff --git a/venv/bin/activate.csh b/venv/bin/activate.csh deleted file mode 100644 index 0924157..0000000 --- a/venv/bin/activate.csh +++ /dev/null @@ -1,37 +0,0 @@ -# This file must be used with "source bin/activate.csh" *from csh*. -# You cannot run it directly. -# Created by Davide Di Blasi . 
-# Ported to Python 3.3 venv by Andrew Svetlov - -alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate' - -# Unset irrelevant variables. -deactivate nondestructive - -setenv VIRTUAL_ENV "/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv" - -set _OLD_VIRTUAL_PATH="$PATH" -setenv PATH "$VIRTUAL_ENV/bin:$PATH" - - -set _OLD_VIRTUAL_PROMPT="$prompt" - -if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then - if ("venv" != "") then - set env_name = "venv" - else - if (`basename "VIRTUAL_ENV"` == "__") then - # special case for Aspen magic directories - # see http://www.zetadev.com/software/aspen/ - set env_name = `basename \`dirname "$VIRTUAL_ENV"\`` - else - set env_name = `basename "$VIRTUAL_ENV"` - endif - endif - set prompt = "[$env_name] $prompt" - unset env_name -endif - -alias pydoc python -m pydoc - -rehash diff --git a/venv/bin/activate.fish b/venv/bin/activate.fish deleted file mode 100644 index 02cb8c5..0000000 --- a/venv/bin/activate.fish +++ /dev/null @@ -1,75 +0,0 @@ -# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org) -# you cannot run it directly - -function deactivate -d "Exit virtualenv and return to normal shell environment" - # reset old environment variables - if test -n "$_OLD_VIRTUAL_PATH" - set -gx PATH $_OLD_VIRTUAL_PATH - set -e _OLD_VIRTUAL_PATH - end - if test -n "$_OLD_VIRTUAL_PYTHONHOME" - set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME - set -e _OLD_VIRTUAL_PYTHONHOME - end - - if test -n "$_OLD_FISH_PROMPT_OVERRIDE" - functions -e fish_prompt - set -e _OLD_FISH_PROMPT_OVERRIDE - functions -c _old_fish_prompt fish_prompt - functions -e _old_fish_prompt - end - - set -e VIRTUAL_ENV - if test "$argv[1]" != "nondestructive" - # Self destruct! - functions -e deactivate - end -end - -# unset irrelevant variables -deactivate nondestructive - -set -gx VIRTUAL_ENV "/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv" - -set -gx _OLD_VIRTUAL_PATH $PATH -set -gx PATH "$VIRTUAL_ENV/bin" $PATH - -# unset PYTHONHOME if set -if set -q PYTHONHOME - set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME - set -e PYTHONHOME -end - -if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" - # fish uses a function instead of an env var to generate the prompt. - - # save the current fish_prompt function as the function _old_fish_prompt - functions -c fish_prompt _old_fish_prompt - - # with the original prompt function renamed, we can override with our own. - function fish_prompt - # Save the return status of the last command - set -l old_status $status - - # Prompt override? - if test -n "(venv) " - printf "%s%s" "(venv) " (set_color normal) - else - # ...Otherwise, prepend env - set -l _checkbase (basename "$VIRTUAL_ENV") - if test $_checkbase = "__" - # special case for Aspen magic directories - # see http://www.zetadev.com/software/aspen/ - printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) - else - printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) - end - end - - # Restore the return status of the previous command. - echo "exit $old_status" | . 
- _old_fish_prompt - end - - set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" -end diff --git a/venv/bin/easy_install b/venv/bin/easy_install deleted file mode 100755 index 5ab24c3..0000000 --- a/venv/bin/easy_install +++ /dev/null @@ -1,8 +0,0 @@ -#!/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from setuptools.command.easy_install import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/easy_install-3.8 b/venv/bin/easy_install-3.8 deleted file mode 100755 index 5ab24c3..0000000 --- a/venv/bin/easy_install-3.8 +++ /dev/null @@ -1,8 +0,0 @@ -#!/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from setuptools.command.easy_install import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/flask b/venv/bin/flask deleted file mode 100755 index 5688306..0000000 --- a/venv/bin/flask +++ /dev/null @@ -1,8 +0,0 @@ -#!/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from flask.cli import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/gunicorn b/venv/bin/gunicorn deleted file mode 100755 index e097b30..0000000 --- a/venv/bin/gunicorn +++ /dev/null @@ -1,8 +0,0 @@ -#!/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from gunicorn.app.wsgiapp import run -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(run()) diff --git a/venv/bin/pip b/venv/bin/pip deleted file mode 100755 index a9a252d..0000000 --- a/venv/bin/pip +++ /dev/null @@ -1,8 +0,0 @@ -#!/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/pip3 b/venv/bin/pip3 deleted file mode 100755 index a9a252d..0000000 --- a/venv/bin/pip3 +++ /dev/null @@ -1,8 +0,0 @@ -#!/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/pip3.8 b/venv/bin/pip3.8 deleted file mode 100755 index a9a252d..0000000 --- a/venv/bin/pip3.8 +++ /dev/null @@ -1,8 +0,0 @@ -#!/Users/indraneel/Desktop/devOps/heroku/basic-flask-app/venv/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/python b/venv/bin/python deleted file mode 120000 index 778bb6a..0000000 --- a/venv/bin/python +++ /dev/null @@ -1 +0,0 @@ -/Users/indraneel/opt/anaconda3/bin/python \ No newline at end of file diff --git a/venv/bin/python3 b/venv/bin/python3 deleted file mode 120000 index d8654aa..0000000 --- a/venv/bin/python3 +++ /dev/null @@ -1 +0,0 @@ -python \ No newline at end of file diff --git 
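
The deletions above remove the virtual environment's activation scripts and console-script stubs (activate, pip, flask, gunicorn), and the hunks that follow remove the vendored site-packages metadata and sources. As a minimal sketch of how this kind of cleanup is typically done and how the environment is rebuilt afterwards — assuming the environment lives in venv/ and dependencies are pinned in a requirements.txt, which are illustrative assumptions rather than anything stated in this patch:

    # Stop tracking the committed virtual environment (the local copy is kept)
    # and ignore it from now on.
    echo "venv/" >> .gitignore
    git rm -r --cached venv
    git commit -m "stop tracking venv"

    # Rebuild an equivalent environment from the pinned dependencies.
    python3 -m venv venv
    . venv/bin/activate
    pip install -r requirements.txt

With venv/ ignored, each checkout rebuilds its own environment locally instead of pulling interpreter-specific files out of the repository.
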
a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/LICENSE.rst deleted file mode 100644 index 9d227a0..0000000 --- a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2010 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/METADATA b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/METADATA deleted file mode 100644 index aaf27ca..0000000 --- a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/METADATA +++ /dev/null @@ -1,125 +0,0 @@ -Metadata-Version: 2.1 -Name: Flask -Version: 2.0.2 -Summary: A simple framework for building complex web applications. 
-Home-page: https://palletsprojects.com/p/flask -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://flask.palletsprojects.com/ -Project-URL: Changes, https://flask.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/flask/ -Project-URL: Issue Tracker, https://github.com/pallets/flask/issues/ -Project-URL: Twitter, https://twitter.com/PalletsTeam -Project-URL: Chat, https://discord.gg/pallets -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Framework :: Flask -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application -Classifier: Topic :: Software Development :: Libraries :: Application Frameworks -Requires-Python: >=3.6 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst -Requires-Dist: Werkzeug (>=2.0) -Requires-Dist: Jinja2 (>=3.0) -Requires-Dist: itsdangerous (>=2.0) -Requires-Dist: click (>=7.1.2) -Provides-Extra: async -Requires-Dist: asgiref (>=3.2) ; extra == 'async' -Provides-Extra: dotenv -Requires-Dist: python-dotenv ; extra == 'dotenv' - -Flask -===== - -Flask is a lightweight `WSGI`_ web application framework. It is designed -to make getting started quick and easy, with the ability to scale up to -complex applications. It began as a simple wrapper around `Werkzeug`_ -and `Jinja`_ and has become one of the most popular Python web -application frameworks. - -Flask offers suggestions, but doesn't enforce any dependencies or -project layout. It is up to the developer to choose the tools and -libraries they want to use. There are many extensions provided by the -community that make adding new functionality easy. - -.. _WSGI: https://wsgi.readthedocs.io/ -.. _Werkzeug: https://werkzeug.palletsprojects.com/ -.. _Jinja: https://jinja.palletsprojects.com/ - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - $ pip install -U Flask - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -A Simple Example ----------------- - -.. code-block:: python - - # save this as app.py - from flask import Flask - - app = Flask(__name__) - - @app.route("/") - def hello(): - return "Hello, World!" - -.. code-block:: text - - $ flask run - * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) - - -Contributing ------------- - -For guidance on setting up a development environment and how to make a -contribution to Flask, see the `contributing guidelines`_. - -.. _contributing guidelines: https://github.com/pallets/flask/blob/main/CONTRIBUTING.rst - - -Donate ------- - -The Pallets organization develops and supports Flask and the libraries -it uses. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, `please -donate today`_. - -.. 
_please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://flask.palletsprojects.com/ -- Changes: https://flask.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/Flask/ -- Source Code: https://github.com/pallets/flask/ -- Issue Tracker: https://github.com/pallets/flask/issues/ -- Website: https://palletsprojects.com/p/flask/ -- Twitter: https://twitter.com/PalletsTeam -- Chat: https://discord.gg/pallets - - diff --git a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/RECORD b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/RECORD deleted file mode 100644 index 9bdc8b6..0000000 --- a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/RECORD +++ /dev/null @@ -1,51 +0,0 @@ -../../../bin/flask,sha256=YvjprXzaHMVjHfJ2hgj3Y9r3ND3U4YQ97BDpyDoPtqU,262 -Flask-2.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Flask-2.0.2.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 -Flask-2.0.2.dist-info/METADATA,sha256=aKsvjFA_ZjZN1jLh1Ac3aQk-ZUZDPrrwo_TGYW1kdAQ,3839 -Flask-2.0.2.dist-info/RECORD,, -Flask-2.0.2.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -Flask-2.0.2.dist-info/entry_points.txt,sha256=gBLA1aKg0OYR8AhbAfg8lnburHtKcgJLDU52BBctN0k,42 -Flask-2.0.2.dist-info/top_level.txt,sha256=dvi65F6AeGWVU0TBpYiC04yM60-FX1gJFkK31IKQr5c,6 -flask/__init__.py,sha256=9ZCelLoNCpr6eSuLmYlzvbp12B3lrLgoN5U2UWk1vdo,2251 -flask/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30 -flask/__pycache__/__init__.cpython-38.pyc,, -flask/__pycache__/__main__.cpython-38.pyc,, -flask/__pycache__/app.cpython-38.pyc,, -flask/__pycache__/blueprints.cpython-38.pyc,, -flask/__pycache__/cli.cpython-38.pyc,, -flask/__pycache__/config.cpython-38.pyc,, -flask/__pycache__/ctx.cpython-38.pyc,, -flask/__pycache__/debughelpers.cpython-38.pyc,, -flask/__pycache__/globals.cpython-38.pyc,, -flask/__pycache__/helpers.cpython-38.pyc,, -flask/__pycache__/logging.cpython-38.pyc,, -flask/__pycache__/scaffold.cpython-38.pyc,, -flask/__pycache__/sessions.cpython-38.pyc,, -flask/__pycache__/signals.cpython-38.pyc,, -flask/__pycache__/templating.cpython-38.pyc,, -flask/__pycache__/testing.cpython-38.pyc,, -flask/__pycache__/typing.cpython-38.pyc,, -flask/__pycache__/views.cpython-38.pyc,, -flask/__pycache__/wrappers.cpython-38.pyc,, -flask/app.py,sha256=ectBbi9hGmVHAse5TNcFQZIDRkDAxYUAnLgfuKD0Xws,81975 -flask/blueprints.py,sha256=AkAVXZ_MMkjwjklzCAMdBNowTiM0wVQPynnUnXjTL2M,23781 -flask/cli.py,sha256=wn2Un9RO32ZfRmCMem5KJ5h62-5lnmy1H9uxgyV-eBs,32238 -flask/config.py,sha256=70Uyjh1Jzb9MfTCT7NDhuZWAzyIEu-TIyk6-22MP3zQ,11285 -flask/ctx.py,sha256=EM3W0v1ctuFQAGk_HWtQdoJEg_r2f5Le4xcmElxFwwk,17428 -flask/debughelpers.py,sha256=W82-xrRmodjopBngI9roYH-q08EbQwN2HEGfDAi6SA0,6184 -flask/globals.py,sha256=cWd-R2hUH3VqPhnmQNww892tQS6Yjqg_wg8UvW1M7NM,1723 -flask/helpers.py,sha256=00WqA3wYeyjMrnAOPZTUyrnUf7H8ik3CVT0kqGl_qjk,30589 -flask/json/__init__.py,sha256=unAKdZBlxMI5OMiTU0-Z2Hl4CF1CMJmqTUzpStiExNw,11822 -flask/json/__pycache__/__init__.cpython-38.pyc,, -flask/json/__pycache__/tag.cpython-38.pyc,, -flask/json/tag.py,sha256=fys3HBLssWHuMAIJuTcf2K0bCtosePBKXIWASZEEjnU,8857 -flask/logging.py,sha256=1o_hirVGqdj7SBdETnhX7IAjklG89RXlrwz_2CjzQQE,2273 -flask/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask/scaffold.py,sha256=fM9mRy7QBh9fhJ0VTogVx900dDa5oxz8FOw6OK5F-TU,32796 -flask/sessions.py,sha256=Kb7zY4qBIOU2cw1xM5mQ_KmgYUBDFbUYWjlkq0EFYis,15189 
-flask/signals.py,sha256=H7QwDciK-dtBxinjKpexpglP0E6k0MJILiFWTItfmqU,2136 -flask/templating.py,sha256=l96VD39JQ0nue4Bcj7wZ4-FWWs-ppLxvgBCpwDQ4KAk,5626 -flask/testing.py,sha256=OsHT-2B70abWH3ulY9IbhLchXIeyj3L-cfcDa88wv5E,10281 -flask/typing.py,sha256=hXEVcXoH-QEabmy1F11pYaQ2SonlkMAwfjBAnqj2x18,1982 -flask/views.py,sha256=nhq31TRB5Z-z2mjFGZACaaB2Et5XPCmWhWxJxOvLWww,5948 -flask/wrappers.py,sha256=VndbHPRBSUUOejmd2Y3ydkoCVUtsS2OJIdJEVIkBVD8,5604 diff --git a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/WHEEL b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/WHEEL deleted file mode 100644 index 5bad85f..0000000 --- a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/entry_points.txt b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/entry_points.txt deleted file mode 100644 index 1eb0252..0000000 --- a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[console_scripts] -flask = flask.cli:main - diff --git a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/top_level.txt deleted file mode 100644 index 7e10602..0000000 --- a/venv/lib/python3.8/site-packages/Flask-2.0.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -flask diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/LICENSE.rst deleted file mode 100644 index c37cae4..0000000 --- a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2007 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/METADATA b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/METADATA deleted file mode 100644 index 3b9355a..0000000 --- a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/METADATA +++ /dev/null @@ -1,113 +0,0 @@ -Metadata-Version: 2.1 -Name: Jinja2 -Version: 3.0.3 -Summary: A very fast and expressive template engine. -Home-page: https://palletsprojects.com/p/jinja/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://jinja.palletsprojects.com/ -Project-URL: Changes, https://jinja.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/jinja/ -Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/ -Project-URL: Twitter, https://twitter.com/PalletsTeam -Project-URL: Chat, https://discord.gg/pallets -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Text Processing :: Markup :: HTML -Requires-Python: >=3.6 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst -Requires-Dist: MarkupSafe (>=2.0) -Provides-Extra: i18n -Requires-Dist: Babel (>=2.7) ; extra == 'i18n' - -Jinja -===== - -Jinja is a fast, expressive, extensible templating engine. Special -placeholders in the template allow writing code similar to Python -syntax. Then the template is passed data to render the final document. - -It includes: - -- Template inheritance and inclusion. -- Define and import macros within templates. -- HTML templates can use autoescaping to prevent XSS from untrusted - user input. -- A sandboxed environment can safely render untrusted templates. -- AsyncIO support for generating templates and calling async - functions. -- I18N support with Babel. -- Templates are compiled to optimized Python code just-in-time and - cached, or can be compiled ahead-of-time. -- Exceptions point to the correct line in templates to make debugging - easier. -- Extensible filters, tests, functions, and even syntax. - -Jinja's philosophy is that while application logic belongs in Python if -possible, it shouldn't make the template designer's job difficult by -restricting functionality too much. - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - $ pip install -U Jinja2 - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -In A Nutshell -------------- - -.. 
code-block:: jinja - - {% extends "base.html" %} - {% block title %}Members{% endblock %} - {% block content %} - - {% endblock %} - - -Donate ------- - -The Pallets organization develops and supports Jinja and other popular -packages. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, `please -donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://jinja.palletsprojects.com/ -- Changes: https://jinja.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/Jinja2/ -- Source Code: https://github.com/pallets/jinja/ -- Issue Tracker: https://github.com/pallets/jinja/issues/ -- Website: https://palletsprojects.com/p/jinja/ -- Twitter: https://twitter.com/PalletsTeam -- Chat: https://discord.gg/pallets - - diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/RECORD b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/RECORD deleted file mode 100644 index e60c78b..0000000 --- a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/RECORD +++ /dev/null @@ -1,58 +0,0 @@ -Jinja2-3.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Jinja2-3.0.3.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 -Jinja2-3.0.3.dist-info/METADATA,sha256=uvKoBSMLvh0qHK-6khEqSe1yOV4jxFzbPSREOp-3BXk,3539 -Jinja2-3.0.3.dist-info/RECORD,, -Jinja2-3.0.3.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -Jinja2-3.0.3.dist-info/entry_points.txt,sha256=Qy_DkVo6Xj_zzOtmErrATe8lHZhOqdjpt3e4JJAGyi8,61 -Jinja2-3.0.3.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7 -jinja2/__init__.py,sha256=V3JjnTV-nyIHN6rwj03N1M11fegjGvv-weiHMQwH1pk,2205 -jinja2/__pycache__/__init__.cpython-38.pyc,, -jinja2/__pycache__/_identifier.cpython-38.pyc,, -jinja2/__pycache__/async_utils.cpython-38.pyc,, -jinja2/__pycache__/bccache.cpython-38.pyc,, -jinja2/__pycache__/compiler.cpython-38.pyc,, -jinja2/__pycache__/constants.cpython-38.pyc,, -jinja2/__pycache__/debug.cpython-38.pyc,, -jinja2/__pycache__/defaults.cpython-38.pyc,, -jinja2/__pycache__/environment.cpython-38.pyc,, -jinja2/__pycache__/exceptions.cpython-38.pyc,, -jinja2/__pycache__/ext.cpython-38.pyc,, -jinja2/__pycache__/filters.cpython-38.pyc,, -jinja2/__pycache__/idtracking.cpython-38.pyc,, -jinja2/__pycache__/lexer.cpython-38.pyc,, -jinja2/__pycache__/loaders.cpython-38.pyc,, -jinja2/__pycache__/meta.cpython-38.pyc,, -jinja2/__pycache__/nativetypes.cpython-38.pyc,, -jinja2/__pycache__/nodes.cpython-38.pyc,, -jinja2/__pycache__/optimizer.cpython-38.pyc,, -jinja2/__pycache__/parser.cpython-38.pyc,, -jinja2/__pycache__/runtime.cpython-38.pyc,, -jinja2/__pycache__/sandbox.cpython-38.pyc,, -jinja2/__pycache__/tests.cpython-38.pyc,, -jinja2/__pycache__/utils.cpython-38.pyc,, -jinja2/__pycache__/visitor.cpython-38.pyc,, -jinja2/_identifier.py,sha256=EdgGJKi7O1yvr4yFlvqPNEqV6M1qHyQr8Gt8GmVTKVM,1775 -jinja2/async_utils.py,sha256=jBcJSmLoQa2PjJdNcOpwaUmBxFNE9rZNwMF7Ob3dP9I,1947 -jinja2/bccache.py,sha256=v5rKAlYxIvfJEa0uGzAC6yCYSS3KuXT5Eqi-n9qvNi8,12670 -jinja2/compiler.py,sha256=v7zKz-mgSYXmfXD9mRmi2BU0B6Z-1RGZmOXCrsPKzc0,72209 -jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433 -jinja2/debug.py,sha256=r0JL0vfO7HPlyKZEdr6eVlg7HoIg2OQGmJ7SeUEyAeI,8494 -jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267 
-jinja2/environment.py,sha256=Vz20npBX5-SUH_eguQuxrSQDEsLFjho0qcHLdMhY3hA,60983 -jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071 -jinja2/ext.py,sha256=44SjDjeYkkxQTpmC2BetOTxEFMgQ42p2dfSwXmPFcSo,32122 -jinja2/filters.py,sha256=jusKTZbd0ddZMaibZkxMUVKNsOsaYtOq_Il8Imtx4BE,52609 -jinja2/idtracking.py,sha256=WekexMql3u5n3vDxFsQ_i8HW0j24AtjWTjrPBLWrHww,10721 -jinja2/lexer.py,sha256=qNEQqDQw_zO5EaH6rFQsER7Qwn2du0o22prB-TR11HE,29930 -jinja2/loaders.py,sha256=1MjXJOU6p4VywFqtpDZhtvtT_vIlmHnZKMKHHw4SZzA,22754 -jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396 -jinja2/nativetypes.py,sha256=KCJl71MogrDih_BHBu6xV5p7Cr_jggAgu-shKTg6L28,3969 -jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550 -jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650 -jinja2/parser.py,sha256=kHnU8v92GwMYkfr0MVakWv8UlSf_kJPx8LUsgQMof70,39767 -jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -jinja2/runtime.py,sha256=wVRlkEmAgNU67AIQDqLvI6UkNLkzDqpLA-z4Mi3vl3g,35054 -jinja2/sandbox.py,sha256=-8zxR6TO9kUkciAVFsIKu8Oq-C7PTeYEdZ5TtA55-gw,14600 -jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905 -jinja2/utils.py,sha256=udQxWIKaq4QDCZiXN31ngKOaGGdaMA5fl0JMaM-F6fg,26971 -jinja2/visitor.py,sha256=ZmeLuTj66ic35-uFH-1m0EKXiw4ObDDb_WuE6h5vPFg,3572 diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/WHEEL b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/WHEEL deleted file mode 100644 index 5bad85f..0000000 --- a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/entry_points.txt b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/entry_points.txt deleted file mode 100644 index 3619483..0000000 --- a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[babel.extractors] -jinja2 = jinja2.ext:babel_extract [i18n] - diff --git a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/top_level.txt deleted file mode 100644 index 7f7afbf..0000000 --- a/venv/lib/python3.8/site-packages/Jinja2-3.0.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -jinja2 diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/LICENSE.rst deleted file mode 100644 index 9d227a0..0000000 --- a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2010 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. 
Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/METADATA b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/METADATA deleted file mode 100644 index ef44e2b..0000000 --- a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/METADATA +++ /dev/null @@ -1,100 +0,0 @@ -Metadata-Version: 2.1 -Name: MarkupSafe -Version: 2.0.1 -Summary: Safely add untrusted strings to HTML/XML markup. -Home-page: https://palletsprojects.com/p/markupsafe/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://markupsafe.palletsprojects.com/ -Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/markupsafe/ -Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/ -Project-URL: Twitter, https://twitter.com/PalletsTeam -Project-URL: Chat, https://discord.gg/pallets -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Text Processing :: Markup :: HTML -Requires-Python: >=3.6 -Description-Content-Type: text/x-rst - -MarkupSafe -========== - -MarkupSafe implements a text object that escapes characters so it is -safe to use in HTML and XML. Characters that have special meanings are -replaced so that they display as the actual characters. This mitigates -injection attacks, meaning untrusted user input can safely be displayed -on a page. - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - pip install -U MarkupSafe - -.. _pip: https://pip.pypa.io/en/stable/quickstart/ - - -Examples --------- - -.. 
code-block:: pycon - - >>> from markupsafe import Markup, escape - - >>> # escape replaces special characters and wraps in Markup - >>> escape("") - Markup('<script>alert(document.cookie);</script>') - - >>> # wrap in Markup to mark text "safe" and prevent escaping - >>> Markup("Hello") - Markup('hello') - - >>> escape(Markup("Hello")) - Markup('hello') - - >>> # Markup is a str subclass - >>> # methods and operators escape their arguments - >>> template = Markup("Hello {name}") - >>> template.format(name='"World"') - Markup('Hello "World"') - - -Donate ------- - -The Pallets organization develops and supports MarkupSafe and other -popular packages. In order to grow the community of contributors and -users, and allow the maintainers to devote more time to the projects, -`please donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://markupsafe.palletsprojects.com/ -- Changes: https://markupsafe.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/MarkupSafe/ -- Source Code: https://github.com/pallets/markupsafe/ -- Issue Tracker: https://github.com/pallets/markupsafe/issues/ -- Website: https://palletsprojects.com/p/markupsafe/ -- Twitter: https://twitter.com/PalletsTeam -- Chat: https://discord.gg/pallets - - diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/RECORD b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/RECORD deleted file mode 100644 index 274f151..0000000 --- a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/RECORD +++ /dev/null @@ -1,14 +0,0 @@ -MarkupSafe-2.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -MarkupSafe-2.0.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 -MarkupSafe-2.0.1.dist-info/METADATA,sha256=FmPpxBdaqCCjF-XKqoxeEzqAzhetQnrkkSsd3V3X-Jc,3211 -MarkupSafe-2.0.1.dist-info/RECORD,, -MarkupSafe-2.0.1.dist-info/WHEEL,sha256=RA4ju32EWHpO-G1BhxTQ3AwXQWjnnrzYGkM9skoN7_I,109 -MarkupSafe-2.0.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 -markupsafe/__init__.py,sha256=9Tez4UIlI7J6_sQcUFK1dKniT_b_8YefpGIyYJ3Sr2Q,8923 -markupsafe/__pycache__/__init__.cpython-38.pyc,, -markupsafe/__pycache__/_native.cpython-38.pyc,, -markupsafe/_native.py,sha256=GTKEV-bWgZuSjklhMHOYRHU9k0DMewTf5mVEZfkbuns,1986 -markupsafe/_speedups.c,sha256=CDDtwaV21D2nYtypnMQzxvvpZpcTvIs8OZ6KDa1g4t0,7400 -markupsafe/_speedups.cpython-38-darwin.so,sha256=Tjek-7TDXevv7qgNBsLLUfsOQlfCSCGzhhV5cCUrIxk,35480 -markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229 -markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/WHEEL b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/WHEEL deleted file mode 100644 index d70ba8e..0000000 --- a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.36.2) -Root-Is-Purelib: false -Tag: cp38-cp38-macosx_10_9_x86_64 - diff --git a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/top_level.txt deleted file mode 100644 index 75bf729..0000000 --- a/venv/lib/python3.8/site-packages/MarkupSafe-2.0.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -markupsafe diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/INSTALLER 
b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/LICENSE.rst deleted file mode 100644 index c37cae4..0000000 --- a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2007 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/METADATA b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/METADATA deleted file mode 100644 index b58b9bd..0000000 --- a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/METADATA +++ /dev/null @@ -1,129 +0,0 @@ -Metadata-Version: 2.1 -Name: Werkzeug -Version: 2.0.2 -Summary: The comprehensive WSGI web application library. 
-Home-page: https://palletsprojects.com/p/werkzeug/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://werkzeug.palletsprojects.com/ -Project-URL: Changes, https://werkzeug.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/werkzeug/ -Project-URL: Issue Tracker, https://github.com/pallets/werkzeug/issues/ -Project-URL: Twitter, https://twitter.com/PalletsTeam -Project-URL: Chat, https://discord.gg/pallets -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware -Classifier: Topic :: Software Development :: Libraries :: Application Frameworks -Requires-Python: >=3.6 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst -Requires-Dist: dataclasses ; python_version < "3.7" -Provides-Extra: watchdog -Requires-Dist: watchdog ; extra == 'watchdog' - -Werkzeug -======== - -*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff") - -Werkzeug is a comprehensive `WSGI`_ web application library. It began as -a simple collection of various utilities for WSGI applications and has -become one of the most advanced WSGI utility libraries. - -It includes: - -- An interactive debugger that allows inspecting stack traces and - source code in the browser with an interactive interpreter for any - frame in the stack. -- A full-featured request object with objects to interact with - headers, query args, form data, files, and cookies. -- A response object that can wrap other WSGI applications and handle - streaming data. -- A routing system for matching URLs to endpoints and generating URLs - for endpoints, with an extensible system for capturing variables - from URLs. -- HTTP utilities to handle entity tags, cache control, dates, user - agents, cookies, files, and more. -- A threaded WSGI server for use while developing applications - locally. -- A test client for simulating HTTP requests during testing without - requiring running a server. - -Werkzeug doesn't enforce any dependencies. It is up to the developer to -choose a template engine, database adapter, and even how to handle -requests. It can be used to build all sorts of end user applications -such as blogs, wikis, or bulletin boards. - -`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while -providing more structure and patterns for defining powerful -applications. - -.. _WSGI: https://wsgi.readthedocs.io/en/latest/ -.. _Flask: https://www.palletsprojects.com/p/flask/ - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - pip install -U Werkzeug - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -A Simple Example ----------------- - -.. 
code-block:: python - - from werkzeug.wrappers import Request, Response - - @Request.application - def application(request): - return Response('Hello, World!') - - if __name__ == '__main__': - from werkzeug.serving import run_simple - run_simple('localhost', 4000, application) - - -Donate ------- - -The Pallets organization develops and supports Werkzeug and other -popular packages. In order to grow the community of contributors and -users, and allow the maintainers to devote more time to the projects, -`please donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://werkzeug.palletsprojects.com/ -- Changes: https://werkzeug.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/Werkzeug/ -- Source Code: https://github.com/pallets/werkzeug/ -- Issue Tracker: https://github.com/pallets/werkzeug/issues/ -- Website: https://palletsprojects.com/p/werkzeug/ -- Twitter: https://twitter.com/PalletsTeam -- Chat: https://discord.gg/pallets - - diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/RECORD b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/RECORD deleted file mode 100644 index a566dd6..0000000 --- a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/RECORD +++ /dev/null @@ -1,111 +0,0 @@ -Werkzeug-2.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Werkzeug-2.0.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 -Werkzeug-2.0.2.dist-info/METADATA,sha256=vh_xrARtpmkFYnWRAgfSiHgl66LH143rMfAfPZo-R_E,4452 -Werkzeug-2.0.2.dist-info/RECORD,, -Werkzeug-2.0.2.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -Werkzeug-2.0.2.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9 -werkzeug/__init__.py,sha256=Wx1PLCftJ7UAS0fBXEO4Prdr6kvEQ124Stwg-XwyhW4,188 -werkzeug/__pycache__/__init__.cpython-38.pyc,, -werkzeug/__pycache__/_internal.cpython-38.pyc,, -werkzeug/__pycache__/_reloader.cpython-38.pyc,, -werkzeug/__pycache__/datastructures.cpython-38.pyc,, -werkzeug/__pycache__/exceptions.cpython-38.pyc,, -werkzeug/__pycache__/filesystem.cpython-38.pyc,, -werkzeug/__pycache__/formparser.cpython-38.pyc,, -werkzeug/__pycache__/http.cpython-38.pyc,, -werkzeug/__pycache__/local.cpython-38.pyc,, -werkzeug/__pycache__/routing.cpython-38.pyc,, -werkzeug/__pycache__/security.cpython-38.pyc,, -werkzeug/__pycache__/serving.cpython-38.pyc,, -werkzeug/__pycache__/test.cpython-38.pyc,, -werkzeug/__pycache__/testapp.cpython-38.pyc,, -werkzeug/__pycache__/urls.cpython-38.pyc,, -werkzeug/__pycache__/user_agent.cpython-38.pyc,, -werkzeug/__pycache__/useragents.cpython-38.pyc,, -werkzeug/__pycache__/utils.cpython-38.pyc,, -werkzeug/__pycache__/wsgi.cpython-38.pyc,, -werkzeug/_internal.py,sha256=_QKkvdaG4pDFwK68c0EpPzYJGe9Y7toRAT1cBbC-CxU,18572 -werkzeug/_reloader.py,sha256=B1hEfgsUOz2IginBQM5Zak_eaIF7gr3GS5-0x2OHvAE,13950 -werkzeug/datastructures.py,sha256=m79A8rHQEt5B7qVqyrjARXzHL66Katn8S92urGscTw4,97929 -werkzeug/datastructures.pyi,sha256=CoVwrQ2Vr9JnbprNL9aE3vOz8mOejT9qysQ-BT53C8Y,34089 -werkzeug/debug/__init__.py,sha256=jYA1e1Gw_8EPOytr-BoMdmm0rzP-Z1H0Ih7wIObnKwQ,17968 -werkzeug/debug/__pycache__/__init__.cpython-38.pyc,, -werkzeug/debug/__pycache__/console.cpython-38.pyc,, -werkzeug/debug/__pycache__/repr.cpython-38.pyc,, -werkzeug/debug/__pycache__/tbtools.cpython-38.pyc,, -werkzeug/debug/console.py,sha256=E1nBMEvFkX673ShQjPtVY-byYatfX9MN-dBMjRI8a8E,5897 
-werkzeug/debug/repr.py,sha256=QCSHENKsChEZDCIApkVi_UNjhJ77v8BMXK1OfxO189M,9483 -werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673 -werkzeug/debug/shared/ICON_LICENSE.md,sha256=DhA6Y1gUl5Jwfg0NFN9Rj4VWITt8tUx0IvdGf0ux9-s,222 -werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507 -werkzeug/debug/shared/debugger.js,sha256=tg42SZs1SVmYWZ-_Fj5ELK5-FLHnGNQrei0K2By8Bw8,10521 -werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191 -werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200 -werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818 -werkzeug/debug/shared/style.css,sha256=h1ZSUVaKNpfbfcYzRb513WAhPySGDQom1uih3uEDxPw,6704 -werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220 -werkzeug/debug/tbtools.py,sha256=AFRrjLDCAps7G5K2-RxNZpXXaEoeFHm68T00f4vlDYA,19362 -werkzeug/exceptions.py,sha256=CUwx0pBiNbk4f9cON17ekgKnmLi6HIVFjUmYZc2x0wM,28681 -werkzeug/filesystem.py,sha256=JS2Dv2QF98WILxY4_thHl-WMcUcwluF_4igkDPaP1l4,1956 -werkzeug/formparser.py,sha256=X-p3Ek4ji8XrKrbmaWxr8StLSc6iuksbpIeweaabs4s,17400 -werkzeug/http.py,sha256=oUCXFFMnkOQ-cHbUY_aiqitshcrSzNDq3fEMf1VI_yk,45141 -werkzeug/local.py,sha256=bwL-y3-qOZAspJ66W1P36SUApLXJy3UY8nLYbM9kfmY,23183 -werkzeug/middleware/__init__.py,sha256=qfqgdT5npwG9ses3-FXQJf3aB95JYP1zchetH_T3PUw,500 -werkzeug/middleware/__pycache__/__init__.cpython-38.pyc,, -werkzeug/middleware/__pycache__/dispatcher.cpython-38.pyc,, -werkzeug/middleware/__pycache__/http_proxy.cpython-38.pyc,, -werkzeug/middleware/__pycache__/lint.cpython-38.pyc,, -werkzeug/middleware/__pycache__/profiler.cpython-38.pyc,, -werkzeug/middleware/__pycache__/proxy_fix.cpython-38.pyc,, -werkzeug/middleware/__pycache__/shared_data.cpython-38.pyc,, -werkzeug/middleware/dispatcher.py,sha256=Fh_w-KyWnTSYF-Lfv5dimQ7THSS7afPAZMmvc4zF1gg,2580 -werkzeug/middleware/http_proxy.py,sha256=HE8VyhS7CR-E1O6_9b68huv8FLgGGR1DLYqkS3Xcp3Q,7558 -werkzeug/middleware/lint.py,sha256=sAg3GcOhICIkwYX5bJGG8n8iebX0Yipq_UH0HvrBvoU,13964 -werkzeug/middleware/profiler.py,sha256=QkXk7cqnaPnF8wQu-5SyPCIOT3_kdABUBorQOghVNOA,4899 -werkzeug/middleware/proxy_fix.py,sha256=uRgQ3dEvFV8JxUqajHYYYOPEeA_BFqaa51Yp8VW0uzA,6849 -werkzeug/middleware/shared_data.py,sha256=xydEqOhAGg0aQJEllPDVfz2-8jHwWvJpAxfPsfPCu7k,10960 -werkzeug/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -werkzeug/routing.py,sha256=oqJ32sWIZtIF6zbqfrnwB1Pbv2ShNwPDJd6FYqxdYVo,84527 -werkzeug/sansio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -werkzeug/sansio/__pycache__/__init__.cpython-38.pyc,, -werkzeug/sansio/__pycache__/multipart.cpython-38.pyc,, -werkzeug/sansio/__pycache__/request.cpython-38.pyc,, -werkzeug/sansio/__pycache__/response.cpython-38.pyc,, -werkzeug/sansio/__pycache__/utils.cpython-38.pyc,, -werkzeug/sansio/multipart.py,sha256=bJMCNC2f5xyAaylahNViJ0JqmV4ThLRbDVGVzKwcqrQ,8751 -werkzeug/sansio/request.py,sha256=aA9rABkWiG4MhYMByanst2NXkEclsq8SIxhb0LQf0e0,20228 -werkzeug/sansio/response.py,sha256=zvCq9HSBBZGBd5Gg412BY9RZIwnKsJl5Kzfd3Kl9sSo,26098 -werkzeug/sansio/utils.py,sha256=V5v-UUnX8pm4RehP9Tt_NiUSOJGJGUvKjlW0eOIQldM,4164 -werkzeug/security.py,sha256=gPDRuCjkjWrcqj99tBMq8_nHFZLFQjgoW5Ga5XIw9jo,8158 -werkzeug/serving.py,sha256=AfgLn0yKr9qXknmwO-0KXJ055oloS4h5DIFDHEu8iHA,38088 -werkzeug/test.py,sha256=8gE1l-Y9yAh2i3SI0kgpxIaI4oYZuehIkxxyDFcz6J0,48123 
-werkzeug/testapp.py,sha256=f48prWSGJhbSrvYb8e1fnAah4BkrLb0enHSdChgsjBY,9471 -werkzeug/urls.py,sha256=Du2lreBHvgBh5c2_bcx72g3hzV2ZabXYZsp-picUIJs,41023 -werkzeug/user_agent.py,sha256=WclZhpvgLurMF45hsioSbS75H1Zb4iMQGKN3_yZ2oKo,1420 -werkzeug/useragents.py,sha256=G8tmv_6vxJaPrLQH3eODNgIYe0_V6KETROQlJI-WxDE,7264 -werkzeug/utils.py,sha256=D_dnCLUfodQ4k0GRSpnI6qDoVoaX7-Dza57bx7sabG0,37101 -werkzeug/wrappers/__init__.py,sha256=-s75nPbyXHzU_rwmLPDhoMuGbEUk0jZT_n0ZQAOFGf8,654 -werkzeug/wrappers/__pycache__/__init__.cpython-38.pyc,, -werkzeug/wrappers/__pycache__/accept.cpython-38.pyc,, -werkzeug/wrappers/__pycache__/auth.cpython-38.pyc,, -werkzeug/wrappers/__pycache__/base_request.cpython-38.pyc,, -werkzeug/wrappers/__pycache__/base_response.cpython-38.pyc,, -werkzeug/wrappers/__pycache__/common_descriptors.cpython-38.pyc,, -werkzeug/wrappers/__pycache__/cors.cpython-38.pyc,, -werkzeug/wrappers/__pycache__/etag.cpython-38.pyc,, -werkzeug/wrappers/__pycache__/json.cpython-38.pyc,, -werkzeug/wrappers/__pycache__/request.cpython-38.pyc,, -werkzeug/wrappers/__pycache__/response.cpython-38.pyc,, -werkzeug/wrappers/__pycache__/user_agent.cpython-38.pyc,, -werkzeug/wrappers/accept.py,sha256=_oZtAQkahvsrPRkNj2fieg7_St9P0NFC3SgZbJKS6xU,429 -werkzeug/wrappers/auth.py,sha256=rZPCzGxHk9R55PRkmS90kRywUVjjuMWzCGtH68qCq8U,856 -werkzeug/wrappers/base_request.py,sha256=saz9RyNQkvI_XLPYVm29KijNHmD1YzgxDqa0qHTbgss,1174 -werkzeug/wrappers/base_response.py,sha256=q_-TaYywT5G4zA-DWDRDJhJSat2_4O7gOPob6ye4_9A,1186 -werkzeug/wrappers/common_descriptors.py,sha256=v_kWLH3mvCiSRVJ1FNw7nO3w2UJfzY57UKKB5J4zCvE,898 -werkzeug/wrappers/cors.py,sha256=c5UndlZsZvYkbPrp6Gj5iSXxw_VOJDJHskO6-jRmNyQ,846 -werkzeug/wrappers/etag.py,sha256=XHWQQs7Mdd1oWezgBIsl-bYe8ydKkRZVil2Qd01D0Mo,846 -werkzeug/wrappers/json.py,sha256=HM1btPseGeXca0vnwQN_MvZl6h-qNsFY5YBKXKXFwus,410 -werkzeug/wrappers/request.py,sha256=yZGplfC3UqNuykwLJmgywiMhmnoKEGHJOZn_A_ublcQ,24822 -werkzeug/wrappers/response.py,sha256=0n8OcQptiM2e550SALLeg7vC1uWsUbCeE1rPZFfXR78,35177 -werkzeug/wrappers/user_agent.py,sha256=Wl1-A0-1r8o7cHIZQTB55O4Ged6LpCKENaQDlOY5pXA,435 -werkzeug/wsgi.py,sha256=L7s5-Rlt7BRVEZ1m81MaenGfMDP7yL3p1Kxt9Yssqzg,33727 diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/WHEEL b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/WHEEL deleted file mode 100644 index 5bad85f..0000000 --- a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/top_level.txt deleted file mode 100644 index 6fe8da8..0000000 --- a/venv/lib/python3.8/site-packages/Werkzeug-2.0.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -werkzeug diff --git a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/INSTALLER b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/LICENSE.rst b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/LICENSE.rst deleted file mode 100644 index d12a849..0000000 --- a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2014 Pallets - -Redistribution and use in 
source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/METADATA b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/METADATA deleted file mode 100644 index 2c8da01..0000000 --- a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/METADATA +++ /dev/null @@ -1,111 +0,0 @@ -Metadata-Version: 2.1 -Name: click -Version: 8.0.3 -Summary: Composable command line interface toolkit -Home-page: https://palletsprojects.com/p/click/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://click.palletsprojects.com/ -Project-URL: Changes, https://click.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/click/ -Project-URL: Issue Tracker, https://github.com/pallets/click/issues/ -Project-URL: Twitter, https://twitter.com/PalletsTeam -Project-URL: Chat, https://discord.gg/pallets -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Requires-Python: >=3.6 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst -Requires-Dist: colorama ; platform_system == "Windows" -Requires-Dist: importlib-metadata ; python_version < "3.8" - -\$ click\_ -========== - -Click is a Python package for creating beautiful command line interfaces -in a composable way with as little code as necessary. It's the "Command -Line Interface Creation Kit". It's highly configurable but comes with -sensible defaults out of the box. - -It aims to make the process of writing command line tools quick and fun -while also preventing any frustration caused by the inability to -implement an intended CLI API. 
- -Click in three points: - -- Arbitrary nesting of commands -- Automatic help page generation -- Supports lazy loading of subcommands at runtime - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - $ pip install -U click - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -A Simple Example ----------------- - -.. code-block:: python - - import click - - @click.command() - @click.option("--count", default=1, help="Number of greetings.") - @click.option("--name", prompt="Your name", help="The person to greet.") - def hello(count, name): - """Simple program that greets NAME for a total of COUNT times.""" - for _ in range(count): - click.echo(f"Hello, {name}!") - - if __name__ == '__main__': - hello() - -.. code-block:: text - - $ python hello.py --count=3 - Your name: Click - Hello, Click! - Hello, Click! - Hello, Click! - - -Donate ------- - -The Pallets organization develops and supports Click and other popular -packages. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, `please -donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://click.palletsprojects.com/ -- Changes: https://click.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/click/ -- Source Code: https://github.com/pallets/click -- Issue Tracker: https://github.com/pallets/click/issues -- Website: https://palletsprojects.com/p/click -- Twitter: https://twitter.com/PalletsTeam -- Chat: https://discord.gg/pallets - - diff --git a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/RECORD b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/RECORD deleted file mode 100644 index 263a6c6..0000000 --- a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/RECORD +++ /dev/null @@ -1,41 +0,0 @@ -click-8.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -click-8.0.3.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 -click-8.0.3.dist-info/METADATA,sha256=_0jCOf3DdGPvKUZUlBukeb1t6Pnxmm_OMGpaBoDthfc,3247 -click-8.0.3.dist-info/RECORD,, -click-8.0.3.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -click-8.0.3.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 -click/__init__.py,sha256=YkIrDg7-0g5aBS6D2pDe58j3MOaFylHED2_8OXh2fnM,3243 -click/__pycache__/__init__.cpython-38.pyc,, -click/__pycache__/_compat.cpython-38.pyc,, -click/__pycache__/_termui_impl.cpython-38.pyc,, -click/__pycache__/_textwrap.cpython-38.pyc,, -click/__pycache__/_unicodefun.cpython-38.pyc,, -click/__pycache__/_winconsole.cpython-38.pyc,, -click/__pycache__/core.cpython-38.pyc,, -click/__pycache__/decorators.cpython-38.pyc,, -click/__pycache__/exceptions.cpython-38.pyc,, -click/__pycache__/formatting.cpython-38.pyc,, -click/__pycache__/globals.cpython-38.pyc,, -click/__pycache__/parser.cpython-38.pyc,, -click/__pycache__/shell_completion.cpython-38.pyc,, -click/__pycache__/termui.cpython-38.pyc,, -click/__pycache__/testing.cpython-38.pyc,, -click/__pycache__/types.cpython-38.pyc,, -click/__pycache__/utils.cpython-38.pyc,, -click/_compat.py,sha256=P15KQumAZC2F2MFe_JSRbvVOJcNosQfMDrdZq0ReCLM,18814 -click/_termui_impl.py,sha256=z78J5HF_RTsOBhjNLjoigaqRap3P2pWwEDDAjoZzgUg,23452 -click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353 -click/_unicodefun.py,sha256=JKSh1oSwG_zbjAu4TBCa9tQde2P9FiYcf4MBfy5NdT8,3201 
-click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860 -click/core.py,sha256=k4PA2z0BT_dmed9I52Q2VLi8r6ekTMCtCQzw2y915Xs,111478 -click/decorators.py,sha256=sGkXJGmP7eLtjtmPl_Un2uBTlrhK8s2L22n-yBiDwTw,14864 -click/exceptions.py,sha256=7gDaLGuFZBeCNwY9ERMsF2-Z3R9Fvq09Zc6IZSKjseo,9167 -click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706 -click/globals.py,sha256=kGPzxq55Ug4dFUrgRV-5oHVPOPdLCUhmYolbrrVBo8g,1985 -click/parser.py,sha256=cAEt1uQR8gq3-S9ysqbVU-fdAZNvilxw4ReJ_T1OQMk,19044 -click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -click/shell_completion.py,sha256=_hPI12T9Ex-y5a3WunWnlH0Gca2_urzXFXkDnt7G6Ow,18001 -click/termui.py,sha256=Rp2gFE8x7j8sEIoFMOcPmuqxJQVWWTrwEzyC14-sPAw,29006 -click/testing.py,sha256=kLR5Qcny1OlgxaGB3gweTr0gQe1yVlmgQRn2esA2Fz4,16020 -click/types.py,sha256=VoNZnIlRBAtRRgzavdqVnyfzY5y4U4qzVGI1UvvX1ls,35391 -click/utils.py,sha256=avYwX-3l2KkdJNUo8NmncZSoAdEmniQ_M5sdsSYloJ4,18759 diff --git a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/WHEEL b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/WHEEL deleted file mode 100644 index 5bad85f..0000000 --- a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/top_level.txt b/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/top_level.txt deleted file mode 100644 index dca9a90..0000000 --- a/venv/lib/python3.8/site-packages/click-8.0.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -click diff --git a/venv/lib/python3.8/site-packages/click/__init__.py b/venv/lib/python3.8/site-packages/click/__init__.py deleted file mode 100644 index a2ed5d1..0000000 --- a/venv/lib/python3.8/site-packages/click/__init__.py +++ /dev/null @@ -1,75 +0,0 @@ -""" -Click is a simple Python module inspired by the stdlib optparse to make -writing command line scripts fun. Unlike other modules, it's based -around a simple API that does not come with too much magic and is -composable. 
-""" -from .core import Argument as Argument -from .core import BaseCommand as BaseCommand -from .core import Command as Command -from .core import CommandCollection as CommandCollection -from .core import Context as Context -from .core import Group as Group -from .core import MultiCommand as MultiCommand -from .core import Option as Option -from .core import Parameter as Parameter -from .decorators import argument as argument -from .decorators import command as command -from .decorators import confirmation_option as confirmation_option -from .decorators import group as group -from .decorators import help_option as help_option -from .decorators import make_pass_decorator as make_pass_decorator -from .decorators import option as option -from .decorators import pass_context as pass_context -from .decorators import pass_obj as pass_obj -from .decorators import password_option as password_option -from .decorators import version_option as version_option -from .exceptions import Abort as Abort -from .exceptions import BadArgumentUsage as BadArgumentUsage -from .exceptions import BadOptionUsage as BadOptionUsage -from .exceptions import BadParameter as BadParameter -from .exceptions import ClickException as ClickException -from .exceptions import FileError as FileError -from .exceptions import MissingParameter as MissingParameter -from .exceptions import NoSuchOption as NoSuchOption -from .exceptions import UsageError as UsageError -from .formatting import HelpFormatter as HelpFormatter -from .formatting import wrap_text as wrap_text -from .globals import get_current_context as get_current_context -from .parser import OptionParser as OptionParser -from .termui import clear as clear -from .termui import confirm as confirm -from .termui import echo_via_pager as echo_via_pager -from .termui import edit as edit -from .termui import get_terminal_size as get_terminal_size -from .termui import getchar as getchar -from .termui import launch as launch -from .termui import pause as pause -from .termui import progressbar as progressbar -from .termui import prompt as prompt -from .termui import secho as secho -from .termui import style as style -from .termui import unstyle as unstyle -from .types import BOOL as BOOL -from .types import Choice as Choice -from .types import DateTime as DateTime -from .types import File as File -from .types import FLOAT as FLOAT -from .types import FloatRange as FloatRange -from .types import INT as INT -from .types import IntRange as IntRange -from .types import ParamType as ParamType -from .types import Path as Path -from .types import STRING as STRING -from .types import Tuple as Tuple -from .types import UNPROCESSED as UNPROCESSED -from .types import UUID as UUID -from .utils import echo as echo -from .utils import format_filename as format_filename -from .utils import get_app_dir as get_app_dir -from .utils import get_binary_stream as get_binary_stream -from .utils import get_os_args as get_os_args -from .utils import get_text_stream as get_text_stream -from .utils import open_file as open_file - -__version__ = "8.0.3" diff --git a/venv/lib/python3.8/site-packages/click/_compat.py b/venv/lib/python3.8/site-packages/click/_compat.py deleted file mode 100644 index b9e1f0d..0000000 --- a/venv/lib/python3.8/site-packages/click/_compat.py +++ /dev/null @@ -1,627 +0,0 @@ -import codecs -import io -import os -import re -import sys -import typing as t -from weakref import WeakKeyDictionary - -CYGWIN = sys.platform.startswith("cygwin") -MSYS2 = sys.platform.startswith("win") and 
("GCC" in sys.version) -# Determine local App Engine environment, per Google's own suggestion -APP_ENGINE = "APPENGINE_RUNTIME" in os.environ and "Development/" in os.environ.get( - "SERVER_SOFTWARE", "" -) -WIN = sys.platform.startswith("win") and not APP_ENGINE and not MSYS2 -auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None -_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") - - -def get_filesystem_encoding() -> str: - return sys.getfilesystemencoding() or sys.getdefaultencoding() - - -def _make_text_stream( - stream: t.BinaryIO, - encoding: t.Optional[str], - errors: t.Optional[str], - force_readable: bool = False, - force_writable: bool = False, -) -> t.TextIO: - if encoding is None: - encoding = get_best_encoding(stream) - if errors is None: - errors = "replace" - return _NonClosingTextIOWrapper( - stream, - encoding, - errors, - line_buffering=True, - force_readable=force_readable, - force_writable=force_writable, - ) - - -def is_ascii_encoding(encoding: str) -> bool: - """Checks if a given encoding is ascii.""" - try: - return codecs.lookup(encoding).name == "ascii" - except LookupError: - return False - - -def get_best_encoding(stream: t.IO) -> str: - """Returns the default stream encoding if not found.""" - rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() - if is_ascii_encoding(rv): - return "utf-8" - return rv - - -class _NonClosingTextIOWrapper(io.TextIOWrapper): - def __init__( - self, - stream: t.BinaryIO, - encoding: t.Optional[str], - errors: t.Optional[str], - force_readable: bool = False, - force_writable: bool = False, - **extra: t.Any, - ) -> None: - self._stream = stream = t.cast( - t.BinaryIO, _FixupStream(stream, force_readable, force_writable) - ) - super().__init__(stream, encoding, errors, **extra) - - def __del__(self) -> None: - try: - self.detach() - except Exception: - pass - - def isatty(self) -> bool: - # https://bitbucket.org/pypy/pypy/issue/1803 - return self._stream.isatty() - - -class _FixupStream: - """The new io interface needs more from streams than streams - traditionally implement. As such, this fix-up code is necessary in - some circumstances. - - The forcing of readable and writable flags are there because some tools - put badly patched objects on sys (one such offender are certain version - of jupyter notebook). 
- """ - - def __init__( - self, - stream: t.BinaryIO, - force_readable: bool = False, - force_writable: bool = False, - ): - self._stream = stream - self._force_readable = force_readable - self._force_writable = force_writable - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._stream, name) - - def read1(self, size: int) -> bytes: - f = getattr(self._stream, "read1", None) - - if f is not None: - return t.cast(bytes, f(size)) - - return self._stream.read(size) - - def readable(self) -> bool: - if self._force_readable: - return True - x = getattr(self._stream, "readable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.read(0) - except Exception: - return False - return True - - def writable(self) -> bool: - if self._force_writable: - return True - x = getattr(self._stream, "writable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.write("") # type: ignore - except Exception: - try: - self._stream.write(b"") - except Exception: - return False - return True - - def seekable(self) -> bool: - x = getattr(self._stream, "seekable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.seek(self._stream.tell()) - except Exception: - return False - return True - - -def _is_binary_reader(stream: t.IO, default: bool = False) -> bool: - try: - return isinstance(stream.read(0), bytes) - except Exception: - return default - # This happens in some cases where the stream was already - # closed. In this case, we assume the default. - - -def _is_binary_writer(stream: t.IO, default: bool = False) -> bool: - try: - stream.write(b"") - except Exception: - try: - stream.write("") - return False - except Exception: - pass - return default - return True - - -def _find_binary_reader(stream: t.IO) -> t.Optional[t.BinaryIO]: - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_reader(stream, False): - return t.cast(t.BinaryIO, stream) - - buf = getattr(stream, "buffer", None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. - if buf is not None and _is_binary_reader(buf, True): - return t.cast(t.BinaryIO, buf) - - return None - - -def _find_binary_writer(stream: t.IO) -> t.Optional[t.BinaryIO]: - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_writer(stream, False): - return t.cast(t.BinaryIO, stream) - - buf = getattr(stream, "buffer", None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. - if buf is not None and _is_binary_writer(buf, True): - return t.cast(t.BinaryIO, buf) - - return None - - -def _stream_is_misconfigured(stream: t.TextIO) -> bool: - """A stream is misconfigured if its encoding is ASCII.""" - # If the stream does not have an encoding set, we assume it's set - # to ASCII. This appears to happen in certain unittest - # environments. It's not quite clear what the correct behavior is - # but this at least will force Click to recover somehow. 
- return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") - - -def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool: - """A stream attribute is compatible if it is equal to the - desired value or the desired value is unset and the attribute - has a value. - """ - stream_value = getattr(stream, attr, None) - return stream_value == value or (value is None and stream_value is not None) - - -def _is_compatible_text_stream( - stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] -) -> bool: - """Check if a stream's encoding and errors attributes are - compatible with the desired values. - """ - return _is_compat_stream_attr( - stream, "encoding", encoding - ) and _is_compat_stream_attr(stream, "errors", errors) - - -def _force_correct_text_stream( - text_stream: t.IO, - encoding: t.Optional[str], - errors: t.Optional[str], - is_binary: t.Callable[[t.IO, bool], bool], - find_binary: t.Callable[[t.IO], t.Optional[t.BinaryIO]], - force_readable: bool = False, - force_writable: bool = False, -) -> t.TextIO: - if is_binary(text_stream, False): - binary_reader = t.cast(t.BinaryIO, text_stream) - else: - text_stream = t.cast(t.TextIO, text_stream) - # If the stream looks compatible, and won't default to a - # misconfigured ascii encoding, return it as-is. - if _is_compatible_text_stream(text_stream, encoding, errors) and not ( - encoding is None and _stream_is_misconfigured(text_stream) - ): - return text_stream - - # Otherwise, get the underlying binary reader. - possible_binary_reader = find_binary(text_stream) - - # If that's not possible, silently use the original reader - # and get mojibake instead of exceptions. - if possible_binary_reader is None: - return text_stream - - binary_reader = possible_binary_reader - - # Default errors to replace instead of strict in order to get - # something that works. - if errors is None: - errors = "replace" - - # Wrap the binary stream in a text stream with the correct - # encoding parameters. 
- return _make_text_stream( - binary_reader, - encoding, - errors, - force_readable=force_readable, - force_writable=force_writable, - ) - - -def _force_correct_text_reader( - text_reader: t.IO, - encoding: t.Optional[str], - errors: t.Optional[str], - force_readable: bool = False, -) -> t.TextIO: - return _force_correct_text_stream( - text_reader, - encoding, - errors, - _is_binary_reader, - _find_binary_reader, - force_readable=force_readable, - ) - - -def _force_correct_text_writer( - text_writer: t.IO, - encoding: t.Optional[str], - errors: t.Optional[str], - force_writable: bool = False, -) -> t.TextIO: - return _force_correct_text_stream( - text_writer, - encoding, - errors, - _is_binary_writer, - _find_binary_writer, - force_writable=force_writable, - ) - - -def get_binary_stdin() -> t.BinaryIO: - reader = _find_binary_reader(sys.stdin) - if reader is None: - raise RuntimeError("Was not able to determine binary stream for sys.stdin.") - return reader - - -def get_binary_stdout() -> t.BinaryIO: - writer = _find_binary_writer(sys.stdout) - if writer is None: - raise RuntimeError("Was not able to determine binary stream for sys.stdout.") - return writer - - -def get_binary_stderr() -> t.BinaryIO: - writer = _find_binary_writer(sys.stderr) - if writer is None: - raise RuntimeError("Was not able to determine binary stream for sys.stderr.") - return writer - - -def get_text_stdin( - encoding: t.Optional[str] = None, errors: t.Optional[str] = None -) -> t.TextIO: - rv = _get_windows_console_stream(sys.stdin, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) - - -def get_text_stdout( - encoding: t.Optional[str] = None, errors: t.Optional[str] = None -) -> t.TextIO: - rv = _get_windows_console_stream(sys.stdout, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) - - -def get_text_stderr( - encoding: t.Optional[str] = None, errors: t.Optional[str] = None -) -> t.TextIO: - rv = _get_windows_console_stream(sys.stderr, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) - - -def _wrap_io_open( - file: t.Union[str, os.PathLike, int], - mode: str, - encoding: t.Optional[str], - errors: t.Optional[str], -) -> t.IO: - """Handles not passing ``encoding`` and ``errors`` in binary mode.""" - if "b" in mode: - return open(file, mode) - - return open(file, mode, encoding=encoding, errors=errors) - - -def open_stream( - filename: str, - mode: str = "r", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", - atomic: bool = False, -) -> t.Tuple[t.IO, bool]: - binary = "b" in mode - - # Standard streams first. These are simple because they don't need - # special handling for the atomic flag. It's entirely ignored. - if filename == "-": - if any(m in mode for m in ["w", "a", "x"]): - if binary: - return get_binary_stdout(), False - return get_text_stdout(encoding=encoding, errors=errors), False - if binary: - return get_binary_stdin(), False - return get_text_stdin(encoding=encoding, errors=errors), False - - # Non-atomic writes directly go out through the regular open functions. 
- if not atomic: - return _wrap_io_open(filename, mode, encoding, errors), True - - # Some usability stuff for atomic writes - if "a" in mode: - raise ValueError( - "Appending to an existing file is not supported, because that" - " would involve an expensive `copy`-operation to a temporary" - " file. Open the file in normal `w`-mode and copy explicitly" - " if that's what you're after." - ) - if "x" in mode: - raise ValueError("Use the `overwrite`-parameter instead.") - if "w" not in mode: - raise ValueError("Atomic writes only make sense with `w`-mode.") - - # Atomic writes are more complicated. They work by opening a file - # as a proxy in the same folder and then using the fdopen - # functionality to wrap it in a Python file. Then we wrap it in an - # atomic file that moves the file over on close. - import errno - import random - - try: - perm: t.Optional[int] = os.stat(filename).st_mode - except OSError: - perm = None - - flags = os.O_RDWR | os.O_CREAT | os.O_EXCL - - if binary: - flags |= getattr(os, "O_BINARY", 0) - - while True: - tmp_filename = os.path.join( - os.path.dirname(filename), - f".__atomic-write{random.randrange(1 << 32):08x}", - ) - try: - fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) - break - except OSError as e: - if e.errno == errno.EEXIST or ( - os.name == "nt" - and e.errno == errno.EACCES - and os.path.isdir(e.filename) - and os.access(e.filename, os.W_OK) - ): - continue - raise - - if perm is not None: - os.chmod(tmp_filename, perm) # in case perm includes bits in umask - - f = _wrap_io_open(fd, mode, encoding, errors) - af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) - return t.cast(t.IO, af), True - - -class _AtomicFile: - def __init__(self, f: t.IO, tmp_filename: str, real_filename: str) -> None: - self._f = f - self._tmp_filename = tmp_filename - self._real_filename = real_filename - self.closed = False - - @property - def name(self) -> str: - return self._real_filename - - def close(self, delete: bool = False) -> None: - if self.closed: - return - self._f.close() - os.replace(self._tmp_filename, self._real_filename) - self.closed = True - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._f, name) - - def __enter__(self) -> "_AtomicFile": - return self - - def __exit__(self, exc_type, exc_value, tb): # type: ignore - self.close(delete=exc_type is not None) - - def __repr__(self) -> str: - return repr(self._f) - - -def strip_ansi(value: str) -> str: - return _ansi_re.sub("", value) - - -def _is_jupyter_kernel_output(stream: t.IO) -> bool: - while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): - stream = stream._stream - - return stream.__class__.__module__.startswith("ipykernel.") - - -def should_strip_ansi( - stream: t.Optional[t.IO] = None, color: t.Optional[bool] = None -) -> bool: - if color is None: - if stream is None: - stream = sys.stdin - return not isatty(stream) and not _is_jupyter_kernel_output(stream) - return not color - - -# On Windows, wrap the output streams with colorama to support ANSI -# color codes. 
-# NOTE: double check is needed so mypy does not analyze this on Linux -if sys.platform.startswith("win") and WIN: - from ._winconsole import _get_windows_console_stream - - def _get_argv_encoding() -> str: - import locale - - return locale.getpreferredencoding() - - _ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() - - def auto_wrap_for_ansi( - stream: t.TextIO, color: t.Optional[bool] = None - ) -> t.TextIO: - """Support ANSI color and style codes on Windows by wrapping a - stream with colorama. - """ - try: - cached = _ansi_stream_wrappers.get(stream) - except Exception: - cached = None - - if cached is not None: - return cached - - import colorama - - strip = should_strip_ansi(stream, color) - ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) - rv = t.cast(t.TextIO, ansi_wrapper.stream) - _write = rv.write - - def _safe_write(s): - try: - return _write(s) - except BaseException: - ansi_wrapper.reset_all() - raise - - rv.write = _safe_write - - try: - _ansi_stream_wrappers[stream] = rv - except Exception: - pass - - return rv - - -else: - - def _get_argv_encoding() -> str: - return getattr(sys.stdin, "encoding", None) or get_filesystem_encoding() - - def _get_windows_console_stream( - f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] - ) -> t.Optional[t.TextIO]: - return None - - -def term_len(x: str) -> int: - return len(strip_ansi(x)) - - -def isatty(stream: t.IO) -> bool: - try: - return stream.isatty() - except Exception: - return False - - -def _make_cached_stream_func( - src_func: t.Callable[[], t.TextIO], wrapper_func: t.Callable[[], t.TextIO] -) -> t.Callable[[], t.TextIO]: - cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() - - def func() -> t.TextIO: - stream = src_func() - try: - rv = cache.get(stream) - except Exception: - rv = None - if rv is not None: - return rv - rv = wrapper_func() - try: - cache[stream] = rv - except Exception: - pass - return rv - - return func - - -_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) -_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) -_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) - - -binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = { - "stdin": get_binary_stdin, - "stdout": get_binary_stdout, - "stderr": get_binary_stderr, -} - -text_streams: t.Mapping[ - str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO] -] = { - "stdin": get_text_stdin, - "stdout": get_text_stdout, - "stderr": get_text_stderr, -} diff --git a/venv/lib/python3.8/site-packages/click/_termui_impl.py b/venv/lib/python3.8/site-packages/click/_termui_impl.py deleted file mode 100644 index 39c1d08..0000000 --- a/venv/lib/python3.8/site-packages/click/_termui_impl.py +++ /dev/null @@ -1,718 +0,0 @@ -""" -This module contains implementations for the termui module. To keep the -import time of Click down, some infrequently used functionality is -placed in this module and only imported as needed. 
-""" -import contextlib -import math -import os -import sys -import time -import typing as t -from gettext import gettext as _ - -from ._compat import _default_text_stdout -from ._compat import CYGWIN -from ._compat import get_best_encoding -from ._compat import isatty -from ._compat import open_stream -from ._compat import strip_ansi -from ._compat import term_len -from ._compat import WIN -from .exceptions import ClickException -from .utils import echo - -V = t.TypeVar("V") - -if os.name == "nt": - BEFORE_BAR = "\r" - AFTER_BAR = "\n" -else: - BEFORE_BAR = "\r\033[?25l" - AFTER_BAR = "\033[?25h\n" - - -class ProgressBar(t.Generic[V]): - def __init__( - self, - iterable: t.Optional[t.Iterable[V]], - length: t.Optional[int] = None, - fill_char: str = "#", - empty_char: str = " ", - bar_template: str = "%(bar)s", - info_sep: str = " ", - show_eta: bool = True, - show_percent: t.Optional[bool] = None, - show_pos: bool = False, - item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, - label: t.Optional[str] = None, - file: t.Optional[t.TextIO] = None, - color: t.Optional[bool] = None, - update_min_steps: int = 1, - width: int = 30, - ) -> None: - self.fill_char = fill_char - self.empty_char = empty_char - self.bar_template = bar_template - self.info_sep = info_sep - self.show_eta = show_eta - self.show_percent = show_percent - self.show_pos = show_pos - self.item_show_func = item_show_func - self.label = label or "" - if file is None: - file = _default_text_stdout() - self.file = file - self.color = color - self.update_min_steps = update_min_steps - self._completed_intervals = 0 - self.width = width - self.autowidth = width == 0 - - if length is None: - from operator import length_hint - - length = length_hint(iterable, -1) - - if length == -1: - length = None - if iterable is None: - if length is None: - raise TypeError("iterable or length is required") - iterable = t.cast(t.Iterable[V], range(length)) - self.iter = iter(iterable) - self.length = length - self.pos = 0 - self.avg: t.List[float] = [] - self.start = self.last_eta = time.time() - self.eta_known = False - self.finished = False - self.max_width: t.Optional[int] = None - self.entered = False - self.current_item: t.Optional[V] = None - self.is_hidden = not isatty(self.file) - self._last_line: t.Optional[str] = None - - def __enter__(self) -> "ProgressBar": - self.entered = True - self.render_progress() - return self - - def __exit__(self, exc_type, exc_value, tb): # type: ignore - self.render_finish() - - def __iter__(self) -> t.Iterator[V]: - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - self.render_progress() - return self.generator() - - def __next__(self) -> V: - # Iteration is defined in terms of a generator function, - # returned by iter(self); use that to define next(). This works - # because `self.iter` is an iterable consumed by that generator, - # so it is re-entry safe. Calling `next(self.generator())` - # twice works and does "what you want". 
- return next(iter(self)) - - def render_finish(self) -> None: - if self.is_hidden: - return - self.file.write(AFTER_BAR) - self.file.flush() - - @property - def pct(self) -> float: - if self.finished: - return 1.0 - return min(self.pos / (float(self.length or 1) or 1), 1.0) - - @property - def time_per_iteration(self) -> float: - if not self.avg: - return 0.0 - return sum(self.avg) / float(len(self.avg)) - - @property - def eta(self) -> float: - if self.length is not None and not self.finished: - return self.time_per_iteration * (self.length - self.pos) - return 0.0 - - def format_eta(self) -> str: - if self.eta_known: - t = int(self.eta) - seconds = t % 60 - t //= 60 - minutes = t % 60 - t //= 60 - hours = t % 24 - t //= 24 - if t > 0: - return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" - else: - return f"{hours:02}:{minutes:02}:{seconds:02}" - return "" - - def format_pos(self) -> str: - pos = str(self.pos) - if self.length is not None: - pos += f"/{self.length}" - return pos - - def format_pct(self) -> str: - return f"{int(self.pct * 100): 4}%"[1:] - - def format_bar(self) -> str: - if self.length is not None: - bar_length = int(self.pct * self.width) - bar = self.fill_char * bar_length - bar += self.empty_char * (self.width - bar_length) - elif self.finished: - bar = self.fill_char * self.width - else: - chars = list(self.empty_char * (self.width or 1)) - if self.time_per_iteration != 0: - chars[ - int( - (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) - * self.width - ) - ] = self.fill_char - bar = "".join(chars) - return bar - - def format_progress_line(self) -> str: - show_percent = self.show_percent - - info_bits = [] - if self.length is not None and show_percent is None: - show_percent = not self.show_pos - - if self.show_pos: - info_bits.append(self.format_pos()) - if show_percent: - info_bits.append(self.format_pct()) - if self.show_eta and self.eta_known and not self.finished: - info_bits.append(self.format_eta()) - if self.item_show_func is not None: - item_info = self.item_show_func(self.current_item) - if item_info is not None: - info_bits.append(item_info) - - return ( - self.bar_template - % { - "label": self.label, - "bar": self.format_bar(), - "info": self.info_sep.join(info_bits), - } - ).rstrip() - - def render_progress(self) -> None: - import shutil - - if self.is_hidden: - # Only output the label as it changes if the output is not a - # TTY. Use file=stderr if you expect to be piping stdout. - if self._last_line != self.label: - self._last_line = self.label - echo(self.label, file=self.file, color=self.color) - - return - - buf = [] - # Update width in case the terminal has been resized - if self.autowidth: - old_width = self.width - self.width = 0 - clutter_length = term_len(self.format_progress_line()) - new_width = max(0, shutil.get_terminal_size().columns - clutter_length) - if new_width < old_width: - buf.append(BEFORE_BAR) - buf.append(" " * self.max_width) # type: ignore - self.max_width = new_width - self.width = new_width - - clear_width = self.width - if self.max_width is not None: - clear_width = self.max_width - - buf.append(BEFORE_BAR) - line = self.format_progress_line() - line_len = term_len(line) - if self.max_width is None or self.max_width < line_len: - self.max_width = line_len - - buf.append(line) - buf.append(" " * (clear_width - line_len)) - line = "".join(buf) - # Render the line only if it changed. 
- - if line != self._last_line: - self._last_line = line - echo(line, file=self.file, color=self.color, nl=False) - self.file.flush() - - def make_step(self, n_steps: int) -> None: - self.pos += n_steps - if self.length is not None and self.pos >= self.length: - self.finished = True - - if (time.time() - self.last_eta) < 1.0: - return - - self.last_eta = time.time() - - # self.avg is a rolling list of length <= 7 of steps where steps are - # defined as time elapsed divided by the total progress through - # self.length. - if self.pos: - step = (time.time() - self.start) / self.pos - else: - step = time.time() - self.start - - self.avg = self.avg[-6:] + [step] - - self.eta_known = self.length is not None - - def update(self, n_steps: int, current_item: t.Optional[V] = None) -> None: - """Update the progress bar by advancing a specified number of - steps, and optionally set the ``current_item`` for this new - position. - - :param n_steps: Number of steps to advance. - :param current_item: Optional item to set as ``current_item`` - for the updated position. - - .. versionchanged:: 8.0 - Added the ``current_item`` optional parameter. - - .. versionchanged:: 8.0 - Only render when the number of steps meets the - ``update_min_steps`` threshold. - """ - if current_item is not None: - self.current_item = current_item - - self._completed_intervals += n_steps - - if self._completed_intervals >= self.update_min_steps: - self.make_step(self._completed_intervals) - self.render_progress() - self._completed_intervals = 0 - - def finish(self) -> None: - self.eta_known = False - self.current_item = None - self.finished = True - - def generator(self) -> t.Iterator[V]: - """Return a generator which yields the items added to the bar - during construction, and updates the progress bar *after* the - yielded block returns. - """ - # WARNING: the iterator interface for `ProgressBar` relies on - # this and only works because this is a simple generator which - # doesn't create or manage additional state. If this function - # changes, the impact should be evaluated both against - # `iter(bar)` and `next(bar)`. `next()` in particular may call - # `self.generator()` repeatedly, and this must remain safe in - # order for that interface to work. - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - - if self.is_hidden: - yield from self.iter - else: - for rv in self.iter: - self.current_item = rv - - # This allows show_item_func to be updated before the - # item is processed. Only trigger at the beginning of - # the update interval. 
- if self._completed_intervals == 0: - self.render_progress() - - yield rv - self.update(1) - - self.finish() - self.render_progress() - - -def pager(generator: t.Iterable[str], color: t.Optional[bool] = None) -> None: - """Decide what method to use for paging through text.""" - stdout = _default_text_stdout() - if not isatty(sys.stdin) or not isatty(stdout): - return _nullpager(stdout, generator, color) - pager_cmd = (os.environ.get("PAGER", None) or "").strip() - if pager_cmd: - if WIN: - return _tempfilepager(generator, pager_cmd, color) - return _pipepager(generator, pager_cmd, color) - if os.environ.get("TERM") in ("dumb", "emacs"): - return _nullpager(stdout, generator, color) - if WIN or sys.platform.startswith("os2"): - return _tempfilepager(generator, "more <", color) - if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0: - return _pipepager(generator, "less", color) - - import tempfile - - fd, filename = tempfile.mkstemp() - os.close(fd) - try: - if hasattr(os, "system") and os.system(f'more "{filename}"') == 0: - return _pipepager(generator, "more", color) - return _nullpager(stdout, generator, color) - finally: - os.unlink(filename) - - -def _pipepager(generator: t.Iterable[str], cmd: str, color: t.Optional[bool]) -> None: - """Page through text by feeding it to another program. Invoking a - pager through this might support colors. - """ - import subprocess - - env = dict(os.environ) - - # If we're piping to less we might support colors under the - # condition that - cmd_detail = cmd.rsplit("/", 1)[-1].split() - if color is None and cmd_detail[0] == "less": - less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}" - if not less_flags: - env["LESS"] = "-R" - color = True - elif "r" in less_flags or "R" in less_flags: - color = True - - c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) - stdin = t.cast(t.BinaryIO, c.stdin) - encoding = get_best_encoding(stdin) - try: - for text in generator: - if not color: - text = strip_ansi(text) - - stdin.write(text.encode(encoding, "replace")) - except (OSError, KeyboardInterrupt): - pass - else: - stdin.close() - - # Less doesn't respect ^C, but catches it for its own UI purposes (aborting - # search or other commands inside less). - # - # That means when the user hits ^C, the parent process (click) terminates, - # but less is still alive, paging the output and messing up the terminal. - # - # If the user wants to make the pager exit on ^C, they should set - # `LESS='-K'`. It's not our decision to make. - while True: - try: - c.wait() - except KeyboardInterrupt: - pass - else: - break - - -def _tempfilepager( - generator: t.Iterable[str], cmd: str, color: t.Optional[bool] -) -> None: - """Page through text by invoking a program on a temporary file.""" - import tempfile - - fd, filename = tempfile.mkstemp() - # TODO: This never terminates if the passed generator never terminates. - text = "".join(generator) - if not color: - text = strip_ansi(text) - encoding = get_best_encoding(sys.stdout) - with open_stream(filename, "wb")[0] as f: - f.write(text.encode(encoding)) - try: - os.system(f'{cmd} "{filename}"') - finally: - os.close(fd) - os.unlink(filename) - - -def _nullpager( - stream: t.TextIO, generator: t.Iterable[str], color: t.Optional[bool] -) -> None: - """Simply print unformatted text. 
This is the ultimate fallback.""" - for text in generator: - if not color: - text = strip_ansi(text) - stream.write(text) - - -class Editor: - def __init__( - self, - editor: t.Optional[str] = None, - env: t.Optional[t.Mapping[str, str]] = None, - require_save: bool = True, - extension: str = ".txt", - ) -> None: - self.editor = editor - self.env = env - self.require_save = require_save - self.extension = extension - - def get_editor(self) -> str: - if self.editor is not None: - return self.editor - for key in "VISUAL", "EDITOR": - rv = os.environ.get(key) - if rv: - return rv - if WIN: - return "notepad" - for editor in "sensible-editor", "vim", "nano": - if os.system(f"which {editor} >/dev/null 2>&1") == 0: - return editor - return "vi" - - def edit_file(self, filename: str) -> None: - import subprocess - - editor = self.get_editor() - environ: t.Optional[t.Dict[str, str]] = None - - if self.env: - environ = os.environ.copy() - environ.update(self.env) - - try: - c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True) - exit_code = c.wait() - if exit_code != 0: - raise ClickException( - _("{editor}: Editing failed").format(editor=editor) - ) - except OSError as e: - raise ClickException( - _("{editor}: Editing failed: {e}").format(editor=editor, e=e) - ) from e - - def edit(self, text: t.Optional[t.AnyStr]) -> t.Optional[t.AnyStr]: - import tempfile - - if not text: - data = b"" - elif isinstance(text, (bytes, bytearray)): - data = text - else: - if text and not text.endswith("\n"): - text += "\n" - - if WIN: - data = text.replace("\n", "\r\n").encode("utf-8-sig") - else: - data = text.encode("utf-8") - - fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) - f: t.BinaryIO - - try: - with os.fdopen(fd, "wb") as f: - f.write(data) - - # If the filesystem resolution is 1 second, like Mac OS - # 10.12 Extended, or 2 seconds, like FAT32, and the editor - # closes very fast, require_save can fail. Set the modified - # time to be 2 seconds in the past to work around this. - os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) - # Depending on the resolution, the exact value might not be - # recorded, so get the new recorded value. 
- timestamp = os.path.getmtime(name) - - self.edit_file(name) - - if self.require_save and os.path.getmtime(name) == timestamp: - return None - - with open(name, "rb") as f: - rv = f.read() - - if isinstance(text, (bytes, bytearray)): - return rv - - return rv.decode("utf-8-sig").replace("\r\n", "\n") # type: ignore - finally: - os.unlink(name) - - -def open_url(url: str, wait: bool = False, locate: bool = False) -> int: - import subprocess - - def _unquote_file(url: str) -> str: - from urllib.parse import unquote - - if url.startswith("file://"): - url = unquote(url[7:]) - - return url - - if sys.platform == "darwin": - args = ["open"] - if wait: - args.append("-W") - if locate: - args.append("-R") - args.append(_unquote_file(url)) - null = open("/dev/null", "w") - try: - return subprocess.Popen(args, stderr=null).wait() - finally: - null.close() - elif WIN: - if locate: - url = _unquote_file(url.replace('"', "")) - args = f'explorer /select,"{url}"' - else: - url = url.replace('"', "") - wait_str = "/WAIT" if wait else "" - args = f'start {wait_str} "" "{url}"' - return os.system(args) - elif CYGWIN: - if locate: - url = os.path.dirname(_unquote_file(url).replace('"', "")) - args = f'cygstart "{url}"' - else: - url = url.replace('"', "") - wait_str = "-w" if wait else "" - args = f'cygstart {wait_str} "{url}"' - return os.system(args) - - try: - if locate: - url = os.path.dirname(_unquote_file(url)) or "." - else: - url = _unquote_file(url) - c = subprocess.Popen(["xdg-open", url]) - if wait: - return c.wait() - return 0 - except OSError: - if url.startswith(("http://", "https://")) and not locate and not wait: - import webbrowser - - webbrowser.open(url) - return 0 - return 1 - - -def _translate_ch_to_exc(ch: str) -> t.Optional[BaseException]: - if ch == "\x03": - raise KeyboardInterrupt() - - if ch == "\x04" and not WIN: # Unix-like, Ctrl+D - raise EOFError() - - if ch == "\x1a" and WIN: # Windows, Ctrl+Z - raise EOFError() - - return None - - -if WIN: - import msvcrt - - @contextlib.contextmanager - def raw_terminal() -> t.Iterator[int]: - yield -1 - - def getchar(echo: bool) -> str: - # The function `getch` will return a bytes object corresponding to - # the pressed character. Since Windows 10 build 1803, it will also - # return \x00 when called a second time after pressing a regular key. - # - # `getwch` does not share this probably-bugged behavior. Moreover, it - # returns a Unicode object by default, which is what we want. - # - # Either of these functions will return \x00 or \xe0 to indicate - # a special key, and you need to call the same function again to get - # the "rest" of the code. The fun part is that \u00e0 is - # "latin small letter a with grave", so if you type that on a French - # keyboard, you _also_ get a \xe0. - # E.g., consider the Up arrow. This returns \xe0 and then \x48. The - # resulting Unicode string reads as "a with grave" + "capital H". - # This is indistinguishable from when the user actually types - # "a with grave" and then "capital H". - # - # When \xe0 is returned, we assume it's part of a special-key sequence - # and call `getwch` again, but that means that when the user types - # the \u00e0 character, `getchar` doesn't return until a second - # character is typed. - # The alternative is returning immediately, but that would mess up - # cross-platform handling of arrow keys and others that start with - # \xe0. 
Another option is using `getch`, but then we can't reliably - # read non-ASCII characters, because return values of `getch` are - # limited to the current 8-bit codepage. - # - # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` - # is doing the right thing in more situations than with `getch`. - func: t.Callable[[], str] - - if echo: - func = msvcrt.getwche # type: ignore - else: - func = msvcrt.getwch # type: ignore - - rv = func() - - if rv in ("\x00", "\xe0"): - # \x00 and \xe0 are control characters that indicate special key, - # see above. - rv += func() - - _translate_ch_to_exc(rv) - return rv - - -else: - import tty - import termios - - @contextlib.contextmanager - def raw_terminal() -> t.Iterator[int]: - f: t.Optional[t.TextIO] - fd: int - - if not isatty(sys.stdin): - f = open("/dev/tty") - fd = f.fileno() - else: - fd = sys.stdin.fileno() - f = None - - try: - old_settings = termios.tcgetattr(fd) - - try: - tty.setraw(fd) - yield fd - finally: - termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) - sys.stdout.flush() - - if f is not None: - f.close() - except termios.error: - pass - - def getchar(echo: bool) -> str: - with raw_terminal() as fd: - ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") - - if echo and isatty(sys.stdout): - sys.stdout.write(ch) - - _translate_ch_to_exc(ch) - return ch diff --git a/venv/lib/python3.8/site-packages/click/_textwrap.py b/venv/lib/python3.8/site-packages/click/_textwrap.py deleted file mode 100644 index b47dcbd..0000000 --- a/venv/lib/python3.8/site-packages/click/_textwrap.py +++ /dev/null @@ -1,49 +0,0 @@ -import textwrap -import typing as t -from contextlib import contextmanager - - -class TextWrapper(textwrap.TextWrapper): - def _handle_long_word( - self, - reversed_chunks: t.List[str], - cur_line: t.List[str], - cur_len: int, - width: int, - ) -> None: - space_left = max(width - cur_len, 1) - - if self.break_long_words: - last = reversed_chunks[-1] - cut = last[:space_left] - res = last[space_left:] - cur_line.append(cut) - reversed_chunks[-1] = res - elif not cur_line: - cur_line.append(reversed_chunks.pop()) - - @contextmanager - def extra_indent(self, indent: str) -> t.Iterator[None]: - old_initial_indent = self.initial_indent - old_subsequent_indent = self.subsequent_indent - self.initial_indent += indent - self.subsequent_indent += indent - - try: - yield - finally: - self.initial_indent = old_initial_indent - self.subsequent_indent = old_subsequent_indent - - def indent_only(self, text: str) -> str: - rv = [] - - for idx, line in enumerate(text.splitlines()): - indent = self.initial_indent - - if idx > 0: - indent = self.subsequent_indent - - rv.append(f"{indent}{line}") - - return "\n".join(rv) diff --git a/venv/lib/python3.8/site-packages/click/_unicodefun.py b/venv/lib/python3.8/site-packages/click/_unicodefun.py deleted file mode 100644 index 9cb30c3..0000000 --- a/venv/lib/python3.8/site-packages/click/_unicodefun.py +++ /dev/null @@ -1,100 +0,0 @@ -import codecs -import os -from gettext import gettext as _ - - -def _verify_python_env() -> None: - """Ensures that the environment is good for Unicode.""" - try: - from locale import getpreferredencoding - - fs_enc = codecs.lookup(getpreferredencoding()).name - except Exception: - fs_enc = "ascii" - - if fs_enc != "ascii": - return - - extra = [ - _( - "Click will abort further execution because Python was" - " configured to use ASCII as encoding for the environment." 
- " Consult https://click.palletsprojects.com/unicode-support/" - " for mitigation steps." - ) - ] - - if os.name == "posix": - import subprocess - - try: - rv = subprocess.Popen( - ["locale", "-a"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - encoding="ascii", - errors="replace", - ).communicate()[0] - except OSError: - rv = "" - - good_locales = set() - has_c_utf8 = False - - for line in rv.splitlines(): - locale = line.strip() - - if locale.lower().endswith((".utf-8", ".utf8")): - good_locales.add(locale) - - if locale.lower() in ("c.utf8", "c.utf-8"): - has_c_utf8 = True - - if not good_locales: - extra.append( - _( - "Additional information: on this system no suitable" - " UTF-8 locales were discovered. This most likely" - " requires resolving by reconfiguring the locale" - " system." - ) - ) - elif has_c_utf8: - extra.append( - _( - "This system supports the C.UTF-8 locale which is" - " recommended. You might be able to resolve your" - " issue by exporting the following environment" - " variables:" - ) - ) - extra.append(" export LC_ALL=C.UTF-8\n export LANG=C.UTF-8") - else: - extra.append( - _( - "This system lists some UTF-8 supporting locales" - " that you can pick from. The following suitable" - " locales were discovered: {locales}" - ).format(locales=", ".join(sorted(good_locales))) - ) - - bad_locale = None - - for env_locale in os.environ.get("LC_ALL"), os.environ.get("LANG"): - if env_locale and env_locale.lower().endswith((".utf-8", ".utf8")): - bad_locale = env_locale - - if env_locale is not None: - break - - if bad_locale is not None: - extra.append( - _( - "Click discovered that you exported a UTF-8 locale" - " but the locale system could not pick up from it" - " because it does not exist. The exported locale is" - " {locale!r} but it is not supported." - ).format(locale=bad_locale) - ) - - raise RuntimeError("\n\n".join(extra)) diff --git a/venv/lib/python3.8/site-packages/click/_winconsole.py b/venv/lib/python3.8/site-packages/click/_winconsole.py deleted file mode 100644 index 6b20df3..0000000 --- a/venv/lib/python3.8/site-packages/click/_winconsole.py +++ /dev/null @@ -1,279 +0,0 @@ -# This module is based on the excellent work by Adam Bartoš who -# provided a lot of what went into the implementation here in -# the discussion to issue1602 in the Python bug tracker. -# -# There are some general differences in regards to how this works -# compared to the original patches as we do not need to patch -# the entire interpreter but just work in our little world of -# echo and prompt. 
-import io -import sys -import time -import typing as t -from ctypes import byref -from ctypes import c_char -from ctypes import c_char_p -from ctypes import c_int -from ctypes import c_ssize_t -from ctypes import c_ulong -from ctypes import c_void_p -from ctypes import POINTER -from ctypes import py_object -from ctypes import Structure -from ctypes.wintypes import DWORD -from ctypes.wintypes import HANDLE -from ctypes.wintypes import LPCWSTR -from ctypes.wintypes import LPWSTR - -from ._compat import _NonClosingTextIOWrapper - -assert sys.platform == "win32" -import msvcrt # noqa: E402 -from ctypes import windll # noqa: E402 -from ctypes import WINFUNCTYPE # noqa: E402 - -c_ssize_p = POINTER(c_ssize_t) - -kernel32 = windll.kernel32 -GetStdHandle = kernel32.GetStdHandle -ReadConsoleW = kernel32.ReadConsoleW -WriteConsoleW = kernel32.WriteConsoleW -GetConsoleMode = kernel32.GetConsoleMode -GetLastError = kernel32.GetLastError -GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) -CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( - ("CommandLineToArgvW", windll.shell32) -) -LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) - -STDIN_HANDLE = GetStdHandle(-10) -STDOUT_HANDLE = GetStdHandle(-11) -STDERR_HANDLE = GetStdHandle(-12) - -PyBUF_SIMPLE = 0 -PyBUF_WRITABLE = 1 - -ERROR_SUCCESS = 0 -ERROR_NOT_ENOUGH_MEMORY = 8 -ERROR_OPERATION_ABORTED = 995 - -STDIN_FILENO = 0 -STDOUT_FILENO = 1 -STDERR_FILENO = 2 - -EOF = b"\x1a" -MAX_BYTES_WRITTEN = 32767 - -try: - from ctypes import pythonapi -except ImportError: - # On PyPy we cannot get buffers so our ability to operate here is - # severely limited. - get_buffer = None -else: - - class Py_buffer(Structure): - _fields_ = [ - ("buf", c_void_p), - ("obj", py_object), - ("len", c_ssize_t), - ("itemsize", c_ssize_t), - ("readonly", c_int), - ("ndim", c_int), - ("format", c_char_p), - ("shape", c_ssize_p), - ("strides", c_ssize_p), - ("suboffsets", c_ssize_p), - ("internal", c_void_p), - ] - - PyObject_GetBuffer = pythonapi.PyObject_GetBuffer - PyBuffer_Release = pythonapi.PyBuffer_Release - - def get_buffer(obj, writable=False): - buf = Py_buffer() - flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE - PyObject_GetBuffer(py_object(obj), byref(buf), flags) - - try: - buffer_type = c_char * buf.len - return buffer_type.from_address(buf.buf) - finally: - PyBuffer_Release(byref(buf)) - - -class _WindowsConsoleRawIOBase(io.RawIOBase): - def __init__(self, handle): - self.handle = handle - - def isatty(self): - super().isatty() - return True - - -class _WindowsConsoleReader(_WindowsConsoleRawIOBase): - def readable(self): - return True - - def readinto(self, b): - bytes_to_be_read = len(b) - if not bytes_to_be_read: - return 0 - elif bytes_to_be_read % 2: - raise ValueError( - "cannot read odd number of bytes from UTF-16-LE encoded console" - ) - - buffer = get_buffer(b, writable=True) - code_units_to_be_read = bytes_to_be_read // 2 - code_units_read = c_ulong() - - rv = ReadConsoleW( - HANDLE(self.handle), - buffer, - code_units_to_be_read, - byref(code_units_read), - None, - ) - if GetLastError() == ERROR_OPERATION_ABORTED: - # wait for KeyboardInterrupt - time.sleep(0.1) - if not rv: - raise OSError(f"Windows error: {GetLastError()}") - - if buffer[0] == EOF: - return 0 - return 2 * code_units_read.value - - -class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): - def writable(self): - return True - - @staticmethod - def _get_error_message(errno): - if errno == ERROR_SUCCESS: - 
return "ERROR_SUCCESS" - elif errno == ERROR_NOT_ENOUGH_MEMORY: - return "ERROR_NOT_ENOUGH_MEMORY" - return f"Windows error {errno}" - - def write(self, b): - bytes_to_be_written = len(b) - buf = get_buffer(b) - code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 - code_units_written = c_ulong() - - WriteConsoleW( - HANDLE(self.handle), - buf, - code_units_to_be_written, - byref(code_units_written), - None, - ) - bytes_written = 2 * code_units_written.value - - if bytes_written == 0 and bytes_to_be_written > 0: - raise OSError(self._get_error_message(GetLastError())) - return bytes_written - - -class ConsoleStream: - def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: - self._text_stream = text_stream - self.buffer = byte_stream - - @property - def name(self) -> str: - return self.buffer.name - - def write(self, x: t.AnyStr) -> int: - if isinstance(x, str): - return self._text_stream.write(x) - try: - self.flush() - except Exception: - pass - return self.buffer.write(x) - - def writelines(self, lines: t.Iterable[t.AnyStr]) -> None: - for line in lines: - self.write(line) - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._text_stream, name) - - def isatty(self) -> bool: - return self.buffer.isatty() - - def __repr__(self): - return f"" - - -def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { - 0: _get_text_stdin, - 1: _get_text_stdout, - 2: _get_text_stderr, -} - - -def _is_console(f: t.TextIO) -> bool: - if not hasattr(f, "fileno"): - return False - - try: - fileno = f.fileno() - except (OSError, io.UnsupportedOperation): - return False - - handle = msvcrt.get_osfhandle(fileno) - return bool(GetConsoleMode(handle, byref(DWORD()))) - - -def _get_windows_console_stream( - f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] -) -> t.Optional[t.TextIO]: - if ( - get_buffer is not None - and encoding in {"utf-16-le", None} - and errors in {"strict", None} - and _is_console(f) - ): - func = _stream_factories.get(f.fileno()) - if func is not None: - b = getattr(f, "buffer", None) - - if b is None: - return None - - return func(b) diff --git a/venv/lib/python3.8/site-packages/click/core.py b/venv/lib/python3.8/site-packages/click/core.py deleted file mode 100644 index f226354..0000000 --- a/venv/lib/python3.8/site-packages/click/core.py +++ /dev/null @@ -1,2953 +0,0 @@ -import enum -import errno -import os -import sys -import typing -import typing as t -from collections import abc -from contextlib import contextmanager -from contextlib import ExitStack -from functools import partial -from functools import update_wrapper -from gettext import gettext 
[remainder of the diff hunk deleting the vendored click/core.py (Click 8.0.x): every line in the hunk is a "-" removal, covering the module's imports, its module-level helpers (_complete_visible_commands, _check_multicommand, iter_params_for_processing, augment_usage_errors), and the ParameterSource, Context, BaseCommand, Command, MultiCommand, Group, CommandCollection, and Parameter classes with their docstrings]
if rv is not None and self.nargs != 1: - rv = self.type.split_envvar_value(rv) - - return rv - - def handle_parse_result( - self, ctx: Context, opts: t.Mapping[str, t.Any], args: t.List[str] - ) -> t.Tuple[t.Any, t.List[str]]: - with augment_usage_errors(ctx, param=self): - value, source = self.consume_value(ctx, opts) - ctx.set_parameter_source(self.name, source) # type: ignore - - try: - value = self.process_value(ctx, value) - except Exception: - if not ctx.resilient_parsing: - raise - - value = None - - if self.expose_value: - ctx.params[self.name] = value # type: ignore - - return value, args - - def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: - pass - - def get_usage_pieces(self, ctx: Context) -> t.List[str]: - return [] - - def get_error_hint(self, ctx: Context) -> str: - """Get a stringified version of the param for use in error messages to - indicate which param caused the error. - """ - hint_list = self.opts or [self.human_readable_name] - return " / ".join(f"'{x}'" for x in hint_list) - - def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: - """Return a list of completions for the incomplete value. If a - ``shell_complete`` function was given during init, it is used. - Otherwise, the :attr:`type` - :meth:`~click.types.ParamType.shell_complete` function is used. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - if self._custom_shell_complete is not None: - results = self._custom_shell_complete(ctx, self, incomplete) - - if results and isinstance(results[0], str): - from click.shell_completion import CompletionItem - - results = [CompletionItem(c) for c in results] - - return t.cast(t.List["CompletionItem"], results) - - return self.type.shell_complete(ctx, self, incomplete) - - -class Option(Parameter): - """Options are usually optional values on the command line and - have some extra features that arguments don't have. - - All other parameters are passed onwards to the parameter constructor. - - :param show_default: controls if the default value should be shown on the - help page. Normally, defaults are not shown. If this - value is a string, it shows the string instead of the - value. This is particularly useful for dynamic options. - :param show_envvar: controls if an environment variable should be shown on - the help page. Normally, environment variables - are not shown. - :param prompt: if set to `True` or a non empty string then the user will be - prompted for input. If set to `True` the prompt will be the - option name capitalized. - :param confirmation_prompt: Prompt a second time to confirm the - value if it was prompted for. Can be set to a string instead of - ``True`` to customize the message. - :param prompt_required: If set to ``False``, the user will be - prompted for input only when the option was specified as a flag - without a value. - :param hide_input: if this is `True` then the input on the prompt will be - hidden from the user. This is useful for password - input. - :param is_flag: forces this option to act as a flag. The default is - auto detection. - :param flag_value: which value should be used for this flag if it's - enabled. This is set to a boolean automatically if - the option string contains a slash to mark two options. - :param multiple: if this is set to `True` then the argument is accepted - multiple times and recorded. 
This is similar to ``nargs`` - in how it works but supports arbitrary number of - arguments. - :param count: this flag makes an option increment an integer. - :param allow_from_autoenv: if this is enabled then the value of this - parameter will be pulled from an environment - variable in case a prefix is defined on the - context. - :param help: the help string. - :param hidden: hide this option from help outputs. - - .. versionchanged:: 8.0.1 - ``type`` is detected from ``flag_value`` if given. - """ - - param_type_name = "option" - - def __init__( - self, - param_decls: t.Optional[t.Sequence[str]] = None, - show_default: t.Union[bool, str] = False, - prompt: t.Union[bool, str] = False, - confirmation_prompt: t.Union[bool, str] = False, - prompt_required: bool = True, - hide_input: bool = False, - is_flag: t.Optional[bool] = None, - flag_value: t.Optional[t.Any] = None, - multiple: bool = False, - count: bool = False, - allow_from_autoenv: bool = True, - type: t.Optional[t.Union[types.ParamType, t.Any]] = None, - help: t.Optional[str] = None, - hidden: bool = False, - show_choices: bool = True, - show_envvar: bool = False, - **attrs: t.Any, - ) -> None: - default_is_missing = "default" not in attrs - super().__init__(param_decls, type=type, multiple=multiple, **attrs) - - if prompt is True: - if self.name is None: - raise TypeError("'name' is required with 'prompt=True'.") - - prompt_text: t.Optional[str] = self.name.replace("_", " ").capitalize() - elif prompt is False: - prompt_text = None - else: - prompt_text = t.cast(str, prompt) - - self.prompt = prompt_text - self.confirmation_prompt = confirmation_prompt - self.prompt_required = prompt_required - self.hide_input = hide_input - self.hidden = hidden - - # If prompt is enabled but not required, then the option can be - # used as a flag to indicate using prompt or flag_value. - self._flag_needs_value = self.prompt is not None and not self.prompt_required - - if is_flag is None: - if flag_value is not None: - # Implicitly a flag because flag_value was set. - is_flag = True - elif self._flag_needs_value: - # Not a flag, but when used as a flag it shows a prompt. - is_flag = False - else: - # Implicitly a flag because flag options were given. - is_flag = bool(self.secondary_opts) - elif is_flag is False and not self._flag_needs_value: - # Not a flag, and prompt is not enabled, can be used as a - # flag if flag_value is set. - self._flag_needs_value = flag_value is not None - - if is_flag and default_is_missing: - self.default: t.Union[t.Any, t.Callable[[], t.Any]] = False - - if flag_value is None: - flag_value = not self.default - - if is_flag and type is None: - # Re-guess the type from the flag value instead of the - # default. 
- self.type = types.convert_type(None, flag_value) - - self.is_flag: bool = is_flag - self.is_bool_flag = is_flag and isinstance(self.type, types.BoolParamType) - self.flag_value: t.Any = flag_value - - # Counting - self.count = count - if count: - if type is None: - self.type = types.IntRange(min=0) - if default_is_missing: - self.default = 0 - - self.allow_from_autoenv = allow_from_autoenv - self.help = help - self.show_default = show_default - self.show_choices = show_choices - self.show_envvar = show_envvar - - if __debug__: - if self.nargs == -1: - raise TypeError("nargs=-1 is not supported for options.") - - if self.prompt and self.is_flag and not self.is_bool_flag: - raise TypeError("'prompt' is not valid for non-boolean flag.") - - if not self.is_bool_flag and self.secondary_opts: - raise TypeError("Secondary flag is not valid for non-boolean flag.") - - if self.is_bool_flag and self.hide_input and self.prompt is not None: - raise TypeError( - "'prompt' with 'hide_input' is not valid for boolean flag." - ) - - if self.count: - if self.multiple: - raise TypeError("'count' is not valid with 'multiple'.") - - if self.is_flag: - raise TypeError("'count' is not valid with 'is_flag'.") - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - help=self.help, - prompt=self.prompt, - is_flag=self.is_flag, - flag_value=self.flag_value, - count=self.count, - hidden=self.hidden, - ) - return info_dict - - def _parse_decls( - self, decls: t.Sequence[str], expose_value: bool - ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: - opts = [] - secondary_opts = [] - name = None - possible_names = [] - - for decl in decls: - if decl.isidentifier(): - if name is not None: - raise TypeError(f"Name '{name}' defined twice") - name = decl - else: - split_char = ";" if decl[:1] == "/" else "/" - if split_char in decl: - first, second = decl.split(split_char, 1) - first = first.rstrip() - if first: - possible_names.append(split_opt(first)) - opts.append(first) - second = second.lstrip() - if second: - secondary_opts.append(second.lstrip()) - if first == second: - raise ValueError( - f"Boolean option {decl!r} cannot use the" - " same flag for true/false." - ) - else: - possible_names.append(split_opt(decl)) - opts.append(decl) - - if name is None and possible_names: - possible_names.sort(key=lambda x: -len(x[0])) # group long options first - name = possible_names[0][1].replace("-", "_").lower() - if not name.isidentifier(): - name = None - - if name is None: - if not expose_value: - return None, opts, secondary_opts - raise TypeError("Could not determine name for option") - - if not opts and not secondary_opts: - raise TypeError( - f"No options defined but a name was passed ({name})." - " Did you mean to declare an argument instead? Did" - f" you mean to pass '--{name}'?" 
- ) - - return name, opts, secondary_opts - - def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: - if self.multiple: - action = "append" - elif self.count: - action = "count" - else: - action = "store" - - if self.is_flag: - action = f"{action}_const" - - if self.is_bool_flag and self.secondary_opts: - parser.add_option( - obj=self, opts=self.opts, dest=self.name, action=action, const=True - ) - parser.add_option( - obj=self, - opts=self.secondary_opts, - dest=self.name, - action=action, - const=False, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - const=self.flag_value, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - nargs=self.nargs, - ) - - def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: - if self.hidden: - return None - - any_prefix_is_slash = False - - def _write_opts(opts: t.Sequence[str]) -> str: - nonlocal any_prefix_is_slash - - rv, any_slashes = join_options(opts) - - if any_slashes: - any_prefix_is_slash = True - - if not self.is_flag and not self.count: - rv += f" {self.make_metavar()}" - - return rv - - rv = [_write_opts(self.opts)] - - if self.secondary_opts: - rv.append(_write_opts(self.secondary_opts)) - - help = self.help or "" - extra = [] - - if self.show_envvar: - envvar = self.envvar - - if envvar is None: - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - - if envvar is not None: - var_str = ( - envvar - if isinstance(envvar, str) - else ", ".join(str(d) for d in envvar) - ) - extra.append(_("env var: {var}").format(var=var_str)) - - # Temporarily enable resilient parsing to avoid type casting - # failing for the default. Might be possible to extend this to - # help formatting in general. - resilient = ctx.resilient_parsing - ctx.resilient_parsing = True - - try: - default_value = self.get_default(ctx, call=False) - finally: - ctx.resilient_parsing = resilient - - show_default_is_str = isinstance(self.show_default, str) - - if show_default_is_str or ( - default_value is not None and (self.show_default or ctx.show_default) - ): - if show_default_is_str: - default_string = f"({self.show_default})" - elif isinstance(default_value, (list, tuple)): - default_string = ", ".join(str(d) for d in default_value) - elif callable(default_value): - default_string = _("(dynamic)") - elif self.is_bool_flag and self.secondary_opts: - # For boolean flags that have distinct True/False opts, - # use the opt without prefix instead of the value. - default_string = split_opt( - (self.opts if self.default else self.secondary_opts)[0] - )[1] - else: - default_string = str(default_value) - - if default_string: - extra.append(_("default: {default}").format(default=default_string)) - - if ( - isinstance(self.type, types._NumberRangeBase) - # skip count with default range type - and not (self.count and self.type.min == 0 and self.type.max is None) - ): - range_str = self.type._describe_range() - - if range_str: - extra.append(range_str) - - if self.required: - extra.append(_("required")) - - if extra: - extra_str = "; ".join(extra) - help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" - - return ("; " if any_prefix_is_slash else " / ").join(rv), help - - @typing.overload - def get_default( - self, ctx: Context, call: "te.Literal[True]" = True - ) -> t.Optional[t.Any]: - ... 
- - @typing.overload - def get_default( - self, ctx: Context, call: bool = ... - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - ... - - def get_default( - self, ctx: Context, call: bool = True - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - # If we're a non boolean flag our default is more complex because - # we need to look at all flags in the same group to figure out - # if we're the the default one in which case we return the flag - # value as default. - if self.is_flag and not self.is_bool_flag: - for param in ctx.command.params: - if param.name == self.name and param.default: - return param.flag_value # type: ignore - - return None - - return super().get_default(ctx, call=call) - - def prompt_for_value(self, ctx: Context) -> t.Any: - """This is an alternative flow that can be activated in the full - value processing if a value does not exist. It will prompt the - user until a valid value exists and then returns the processed - value as result. - """ - assert self.prompt is not None - - # Calculate the default before prompting anything to be stable. - default = self.get_default(ctx) - - # If this is a prompt for a flag we need to handle this - # differently. - if self.is_bool_flag: - return confirm(self.prompt, default) - - return prompt( - self.prompt, - default=default, - type=self.type, - hide_input=self.hide_input, - show_choices=self.show_choices, - confirmation_prompt=self.confirmation_prompt, - value_proc=lambda x: self.process_value(ctx, x), - ) - - def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: - rv = super().resolve_envvar_value(ctx) - - if rv is not None: - return rv - - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - rv = os.environ.get(envvar) - - return rv - - def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: - rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) - - if rv is None: - return None - - value_depth = (self.nargs != 1) + bool(self.multiple) - - if value_depth > 0: - rv = self.type.split_envvar_value(rv) - - if self.multiple and self.nargs != 1: - rv = batch(rv, self.nargs) - - return rv - - def consume_value( - self, ctx: Context, opts: t.Mapping[str, "Parameter"] - ) -> t.Tuple[t.Any, ParameterSource]: - value, source = super().consume_value(ctx, opts) - - # The parser will emit a sentinel value if the option can be - # given as a flag without a value. This is different from None - # to distinguish from the flag not being given at all. - if value is _flag_needs_value: - if self.prompt is not None and not ctx.resilient_parsing: - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - else: - value = self.flag_value - source = ParameterSource.COMMANDLINE - - elif ( - self.multiple - and value is not None - and any(v is _flag_needs_value for v in value) - ): - value = [self.flag_value if v is _flag_needs_value else v for v in value] - source = ParameterSource.COMMANDLINE - - # The value wasn't set, or used the param's default, prompt if - # prompting is enabled. - elif ( - source in {None, ParameterSource.DEFAULT} - and self.prompt is not None - and (self.required or self.prompt_required) - and not ctx.resilient_parsing - ): - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - - return value, source - - -class Argument(Parameter): - """Arguments are positional parameters to a command. 
They generally - provide fewer features than options but can have infinite ``nargs`` - and are required by default. - - All parameters are passed onwards to the parameter constructor. - """ - - param_type_name = "argument" - - def __init__( - self, - param_decls: t.Sequence[str], - required: t.Optional[bool] = None, - **attrs: t.Any, - ) -> None: - if required is None: - if attrs.get("default") is not None: - required = False - else: - required = attrs.get("nargs", 1) > 0 - - if "multiple" in attrs: - raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") - - super().__init__(param_decls, required=required, **attrs) - - if __debug__: - if self.default is not None and self.nargs == -1: - raise TypeError("'default' is not supported for nargs=-1.") - - @property - def human_readable_name(self) -> str: - if self.metavar is not None: - return self.metavar - return self.name.upper() # type: ignore - - def make_metavar(self) -> str: - if self.metavar is not None: - return self.metavar - var = self.type.get_metavar(self) - if not var: - var = self.name.upper() # type: ignore - if not self.required: - var = f"[{var}]" - if self.nargs != 1: - var += "..." - return var - - def _parse_decls( - self, decls: t.Sequence[str], expose_value: bool - ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: - if not decls: - if not expose_value: - return None, [], [] - raise TypeError("Could not determine name for argument") - if len(decls) == 1: - name = arg = decls[0] - name = name.replace("-", "_").lower() - else: - raise TypeError( - "Arguments take exactly one parameter declaration, got" - f" {len(decls)}." - ) - return name, [arg], [] - - def get_usage_pieces(self, ctx: Context) -> t.List[str]: - return [self.make_metavar()] - - def get_error_hint(self, ctx: Context) -> str: - return f"'{self.make_metavar()}'" - - def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: - parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) diff --git a/venv/lib/python3.8/site-packages/click/decorators.py b/venv/lib/python3.8/site-packages/click/decorators.py deleted file mode 100644 index f1cc005..0000000 --- a/venv/lib/python3.8/site-packages/click/decorators.py +++ /dev/null @@ -1,436 +0,0 @@ -import inspect -import types -import typing as t -from functools import update_wrapper -from gettext import gettext as _ - -from .core import Argument -from .core import Command -from .core import Context -from .core import Group -from .core import Option -from .core import Parameter -from .globals import get_current_context -from .utils import echo - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) -FC = t.TypeVar("FC", t.Callable[..., t.Any], Command) - - -def pass_context(f: F) -> F: - """Marks a callback as wanting to receive the current context - object as first argument. - """ - - def new_func(*args, **kwargs): # type: ignore - return f(get_current_context(), *args, **kwargs) - - return update_wrapper(t.cast(F, new_func), f) - - -def pass_obj(f: F) -> F: - """Similar to :func:`pass_context`, but only pass the object on the - context onwards (:attr:`Context.obj`). This is useful if that object - represents the state of a nested system. 
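A minimal sketch of a variadic argument as described by the ``Argument`` docstring above, assuming click is installed; ``src`` and ``dst`` are illustrative names::

    import click

    @click.command()
    @click.argument("src", nargs=-1)
    @click.argument("dst", nargs=1)
    def copy(src, dst):
        """Collect any number of SRC values and exactly one DST value."""
        for fn in src:
            click.echo(f"copy {fn} -> {dst}")

    if __name__ == "__main__":
        copy()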
- """ - - def new_func(*args, **kwargs): # type: ignore - return f(get_current_context().obj, *args, **kwargs) - - return update_wrapper(t.cast(F, new_func), f) - - -def make_pass_decorator( - object_type: t.Type, ensure: bool = False -) -> "t.Callable[[F], F]": - """Given an object type this creates a decorator that will work - similar to :func:`pass_obj` but instead of passing the object of the - current context, it will find the innermost context of type - :func:`object_type`. - - This generates a decorator that works roughly like this:: - - from functools import update_wrapper - - def decorator(f): - @pass_context - def new_func(ctx, *args, **kwargs): - obj = ctx.find_object(object_type) - return ctx.invoke(f, obj, *args, **kwargs) - return update_wrapper(new_func, f) - return decorator - - :param object_type: the type of the object to pass. - :param ensure: if set to `True`, a new object will be created and - remembered on the context if it's not there yet. - """ - - def decorator(f: F) -> F: - def new_func(*args, **kwargs): # type: ignore - ctx = get_current_context() - - if ensure: - obj = ctx.ensure_object(object_type) - else: - obj = ctx.find_object(object_type) - - if obj is None: - raise RuntimeError( - "Managed to invoke callback without a context" - f" object of type {object_type.__name__!r}" - " existing." - ) - - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(t.cast(F, new_func), f) - - return decorator - - -def pass_meta_key( - key: str, *, doc_description: t.Optional[str] = None -) -> "t.Callable[[F], F]": - """Create a decorator that passes a key from - :attr:`click.Context.meta` as the first argument to the decorated - function. - - :param key: Key in ``Context.meta`` to pass. - :param doc_description: Description of the object being passed, - inserted into the decorator's docstring. Defaults to "the 'key' - key from Context.meta". - - .. versionadded:: 8.0 - """ - - def decorator(f: F) -> F: - def new_func(*args, **kwargs): # type: ignore - ctx = get_current_context() - obj = ctx.meta[key] - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(t.cast(F, new_func), f) - - if doc_description is None: - doc_description = f"the {key!r} key from :attr:`click.Context.meta`" - - decorator.__doc__ = ( - f"Decorator that passes {doc_description} as the first argument" - " to the decorated function." - ) - return decorator - - -def _make_command( - f: F, - name: t.Optional[str], - attrs: t.MutableMapping[str, t.Any], - cls: t.Type[Command], -) -> Command: - if isinstance(f, Command): - raise TypeError("Attempted to convert a callback into a command twice.") - - try: - params = f.__click_params__ # type: ignore - params.reverse() - del f.__click_params__ # type: ignore - except AttributeError: - params = [] - - help = attrs.get("help") - - if help is None: - help = inspect.getdoc(f) - else: - help = inspect.cleandoc(help) - - attrs["help"] = help - return cls( - name=name or f.__name__.lower().replace("_", "-"), - callback=f, - params=params, - **attrs, - ) - - -def command( - name: t.Optional[str] = None, - cls: t.Optional[t.Type[Command]] = None, - **attrs: t.Any, -) -> t.Callable[[F], Command]: - r"""Creates a new :class:`Command` and uses the decorated function as - callback. This will also automatically attach all decorated - :func:`option`\s and :func:`argument`\s as parameters to the command. - - The name of the command defaults to the name of the function with - underscores replaced by dashes. 
If you want to change that, you can - pass the intended name as the first argument. - - All keyword arguments are forwarded to the underlying command class. - - Once decorated the function turns into a :class:`Command` instance - that can be invoked as a command line utility or be attached to a - command :class:`Group`. - - :param name: the name of the command. This defaults to the function - name with underscores replaced by dashes. - :param cls: the command class to instantiate. This defaults to - :class:`Command`. - """ - if cls is None: - cls = Command - - def decorator(f: t.Callable[..., t.Any]) -> Command: - cmd = _make_command(f, name, attrs, cls) # type: ignore - cmd.__doc__ = f.__doc__ - return cmd - - return decorator - - -def group(name: t.Optional[str] = None, **attrs: t.Any) -> t.Callable[[F], Group]: - """Creates a new :class:`Group` with a function as callback. This - works otherwise the same as :func:`command` just that the `cls` - parameter is set to :class:`Group`. - """ - attrs.setdefault("cls", Group) - return t.cast(Group, command(name, **attrs)) - - -def _param_memo(f: FC, param: Parameter) -> None: - if isinstance(f, Command): - f.params.append(param) - else: - if not hasattr(f, "__click_params__"): - f.__click_params__ = [] # type: ignore - - f.__click_params__.append(param) # type: ignore - - -def argument(*param_decls: str, **attrs: t.Any) -> t.Callable[[FC], FC]: - """Attaches an argument to the command. All positional arguments are - passed as parameter declarations to :class:`Argument`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Argument` instance manually - and attaching it to the :attr:`Command.params` list. - - :param cls: the argument class to instantiate. This defaults to - :class:`Argument`. - """ - - def decorator(f: FC) -> FC: - ArgumentClass = attrs.pop("cls", Argument) - _param_memo(f, ArgumentClass(param_decls, **attrs)) - return f - - return decorator - - -def option(*param_decls: str, **attrs: t.Any) -> t.Callable[[FC], FC]: - """Attaches an option to the command. All positional arguments are - passed as parameter declarations to :class:`Option`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Option` instance manually - and attaching it to the :attr:`Command.params` list. - - :param cls: the option class to instantiate. This defaults to - :class:`Option`. - """ - - def decorator(f: FC) -> FC: - # Issue 926, copy attrs, so pre-defined options can re-use the same cls= - option_attrs = attrs.copy() - - if "help" in option_attrs: - option_attrs["help"] = inspect.cleandoc(option_attrs["help"]) - OptionClass = option_attrs.pop("cls", Option) - _param_memo(f, OptionClass(param_decls, **option_attrs)) - return f - - return decorator - - -def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--yes`` option which shows a prompt before continuing if - not passed. If the prompt is declined, the program will exit. - - :param param_decls: One or more option names. Defaults to the single - value ``"--yes"``. - :param kwargs: Extra arguments are passed to :func:`option`. 
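A minimal sketch combining the ``command`` and ``option`` decorators, assuming click is installed; the option names, defaults, and prompt text are illustrative::

    import click

    @click.command()
    @click.option("--count", default=1, show_default=True, help="Number of greetings.")
    @click.option("--name", prompt="Your name", help="The person to greet.")
    def hello(count, name):
        """Greet NAME a total of COUNT times."""
        for _ in range(count):
            click.echo(f"Hello, {name}!")

    if __name__ == "__main__":
        hello()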
- """ - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value: - ctx.abort() - - if not param_decls: - param_decls = ("--yes",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("callback", callback) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("prompt", "Do you want to continue?") - kwargs.setdefault("help", "Confirm the action without prompting.") - return option(*param_decls, **kwargs) - - -def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--password`` option which prompts for a password, hiding - input and asking to enter the value again for confirmation. - - :param param_decls: One or more option names. Defaults to the single - value ``"--password"``. - :param kwargs: Extra arguments are passed to :func:`option`. - """ - if not param_decls: - param_decls = ("--password",) - - kwargs.setdefault("prompt", True) - kwargs.setdefault("confirmation_prompt", True) - kwargs.setdefault("hide_input", True) - return option(*param_decls, **kwargs) - - -def version_option( - version: t.Optional[str] = None, - *param_decls: str, - package_name: t.Optional[str] = None, - prog_name: t.Optional[str] = None, - message: t.Optional[str] = None, - **kwargs: t.Any, -) -> t.Callable[[FC], FC]: - """Add a ``--version`` option which immediately prints the version - number and exits the program. - - If ``version`` is not provided, Click will try to detect it using - :func:`importlib.metadata.version` to get the version for the - ``package_name``. On Python < 3.8, the ``importlib_metadata`` - backport must be installed. - - If ``package_name`` is not provided, Click will try to detect it by - inspecting the stack frames. This will be used to detect the - version, so it must match the name of the installed package. - - :param version: The version number to show. If not provided, Click - will try to detect it. - :param param_decls: One or more option names. Defaults to the single - value ``"--version"``. - :param package_name: The package name to detect the version from. If - not provided, Click will try to detect it. - :param prog_name: The name of the CLI to show in the message. If not - provided, it will be detected from the command. - :param message: The message to show. The values ``%(prog)s``, - ``%(package)s``, and ``%(version)s`` are available. Defaults to - ``"%(prog)s, version %(version)s"``. - :param kwargs: Extra arguments are passed to :func:`option`. - :raise RuntimeError: ``version`` could not be detected. - - .. versionchanged:: 8.0 - Add the ``package_name`` parameter, and the ``%(package)s`` - value for messages. - - .. versionchanged:: 8.0 - Use :mod:`importlib.metadata` instead of ``pkg_resources``. The - version is detected based on the package name, not the entry - point name. The Python package name must match the installed - package name, or be passed with ``package_name=``. 
- """ - if message is None: - message = _("%(prog)s, version %(version)s") - - if version is None and package_name is None: - frame = inspect.currentframe() - f_back = frame.f_back if frame is not None else None - f_globals = f_back.f_globals if f_back is not None else None - # break reference cycle - # https://docs.python.org/3/library/inspect.html#the-interpreter-stack - del frame - - if f_globals is not None: - package_name = f_globals.get("__name__") - - if package_name == "__main__": - package_name = f_globals.get("__package__") - - if package_name: - package_name = package_name.partition(".")[0] - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value or ctx.resilient_parsing: - return - - nonlocal prog_name - nonlocal version - - if prog_name is None: - prog_name = ctx.find_root().info_name - - if version is None and package_name is not None: - metadata: t.Optional[types.ModuleType] - - try: - from importlib import metadata # type: ignore - except ImportError: - # Python < 3.8 - import importlib_metadata as metadata # type: ignore - - try: - version = metadata.version(package_name) # type: ignore - except metadata.PackageNotFoundError: # type: ignore - raise RuntimeError( - f"{package_name!r} is not installed. Try passing" - " 'package_name' instead." - ) from None - - if version is None: - raise RuntimeError( - f"Could not determine the version for {package_name!r} automatically." - ) - - echo( - t.cast(str, message) - % {"prog": prog_name, "package": package_name, "version": version}, - color=ctx.color, - ) - ctx.exit() - - if not param_decls: - param_decls = ("--version",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show the version and exit.")) - kwargs["callback"] = callback - return option(*param_decls, **kwargs) - - -def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--help`` option which immediately prints the help page - and exits the program. - - This is usually unnecessary, as the ``--help`` option is added to - each command automatically unless ``add_help_option=False`` is - passed. - - :param param_decls: One or more option names. Defaults to the single - value ``"--help"``. - :param kwargs: Extra arguments are passed to :func:`option`. 
- """ - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value or ctx.resilient_parsing: - return - - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - if not param_decls: - param_decls = ("--help",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show this message and exit.")) - kwargs["callback"] = callback - return option(*param_decls, **kwargs) diff --git a/venv/lib/python3.8/site-packages/click/exceptions.py b/venv/lib/python3.8/site-packages/click/exceptions.py deleted file mode 100644 index 9e20b3e..0000000 --- a/venv/lib/python3.8/site-packages/click/exceptions.py +++ /dev/null @@ -1,287 +0,0 @@ -import os -import typing as t -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import get_text_stderr -from .utils import echo - -if t.TYPE_CHECKING: - from .core import Context - from .core import Parameter - - -def _join_param_hints( - param_hint: t.Optional[t.Union[t.Sequence[str], str]] -) -> t.Optional[str]: - if param_hint is not None and not isinstance(param_hint, str): - return " / ".join(repr(x) for x in param_hint) - - return param_hint - - -class ClickException(Exception): - """An exception that Click can handle and show to the user.""" - - #: The exit code for this exception. - exit_code = 1 - - def __init__(self, message: str) -> None: - super().__init__(message) - self.message = message - - def format_message(self) -> str: - return self.message - - def __str__(self) -> str: - return self.message - - def show(self, file: t.Optional[t.IO] = None) -> None: - if file is None: - file = get_text_stderr() - - echo(_("Error: {message}").format(message=self.format_message()), file=file) - - -class UsageError(ClickException): - """An internal exception that signals a usage error. This typically - aborts any further handling. - - :param message: the error message to display. - :param ctx: optionally the context that caused this error. Click will - fill in the context automatically in some situations. - """ - - exit_code = 2 - - def __init__(self, message: str, ctx: t.Optional["Context"] = None) -> None: - super().__init__(message) - self.ctx = ctx - self.cmd = self.ctx.command if self.ctx else None - - def show(self, file: t.Optional[t.IO] = None) -> None: - if file is None: - file = get_text_stderr() - color = None - hint = "" - if ( - self.ctx is not None - and self.ctx.command.get_help_option(self.ctx) is not None - ): - hint = _("Try '{command} {option}' for help.").format( - command=self.ctx.command_path, option=self.ctx.help_option_names[0] - ) - hint = f"{hint}\n" - if self.ctx is not None: - color = self.ctx.color - echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) - echo( - _("Error: {message}").format(message=self.format_message()), - file=file, - color=color, - ) - - -class BadParameter(UsageError): - """An exception that formats out a standardized error message for a - bad parameter. This is useful when thrown from a callback or type as - Click will attach contextual information to it (for instance, which - parameter it is). - - .. versionadded:: 2.0 - - :param param: the parameter object that caused this error. This can - be left out, and Click will attach this info itself - if possible. - :param param_hint: a string that shows up as parameter name. This - can be used as alternative to `param` in cases - where custom validation should happen. 
If it is - a string it's used as such, if it's a list then - each item is quoted and separated. - """ - - def __init__( - self, - message: str, - ctx: t.Optional["Context"] = None, - param: t.Optional["Parameter"] = None, - param_hint: t.Optional[str] = None, - ) -> None: - super().__init__(message, ctx) - self.param = param - self.param_hint = param_hint - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - return _("Invalid value: {message}").format(message=self.message) - - return _("Invalid value for {param_hint}: {message}").format( - param_hint=_join_param_hints(param_hint), message=self.message - ) - - -class MissingParameter(BadParameter): - """Raised if click required an option or argument but it was not - provided when invoking the script. - - .. versionadded:: 4.0 - - :param param_type: a string that indicates the type of the parameter. - The default is to inherit the parameter type from - the given `param`. Valid values are ``'parameter'``, - ``'option'`` or ``'argument'``. - """ - - def __init__( - self, - message: t.Optional[str] = None, - ctx: t.Optional["Context"] = None, - param: t.Optional["Parameter"] = None, - param_hint: t.Optional[str] = None, - param_type: t.Optional[str] = None, - ) -> None: - super().__init__(message or "", ctx, param, param_hint) - self.param_type = param_type - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint: t.Optional[str] = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - param_hint = None - - param_hint = _join_param_hints(param_hint) - param_hint = f" {param_hint}" if param_hint else "" - - param_type = self.param_type - if param_type is None and self.param is not None: - param_type = self.param.param_type_name - - msg = self.message - if self.param is not None: - msg_extra = self.param.type.get_missing_message(self.param) - if msg_extra: - if msg: - msg += f". {msg_extra}" - else: - msg = msg_extra - - msg = f" {msg}" if msg else "" - - # Translate param_type for known types. - if param_type == "argument": - missing = _("Missing argument") - elif param_type == "option": - missing = _("Missing option") - elif param_type == "parameter": - missing = _("Missing parameter") - else: - missing = _("Missing {param_type}").format(param_type=param_type) - - return f"{missing}{param_hint}.{msg}" - - def __str__(self) -> str: - if not self.message: - param_name = self.param.name if self.param else None - return _("Missing parameter: {param_name}").format(param_name=param_name) - else: - return self.message - - -class NoSuchOption(UsageError): - """Raised if click attempted to handle an option that does not - exist. - - .. 
versionadded:: 4.0 - """ - - def __init__( - self, - option_name: str, - message: t.Optional[str] = None, - possibilities: t.Optional[t.Sequence[str]] = None, - ctx: t.Optional["Context"] = None, - ) -> None: - if message is None: - message = _("No such option: {name}").format(name=option_name) - - super().__init__(message, ctx) - self.option_name = option_name - self.possibilities = possibilities - - def format_message(self) -> str: - if not self.possibilities: - return self.message - - possibility_str = ", ".join(sorted(self.possibilities)) - suggest = ngettext( - "Did you mean {possibility}?", - "(Possible options: {possibilities})", - len(self.possibilities), - ).format(possibility=possibility_str, possibilities=possibility_str) - return f"{self.message} {suggest}" - - -class BadOptionUsage(UsageError): - """Raised if an option is generally supplied but the use of the option - was incorrect. This is for instance raised if the number of arguments - for an option is not correct. - - .. versionadded:: 4.0 - - :param option_name: the name of the option being used incorrectly. - """ - - def __init__( - self, option_name: str, message: str, ctx: t.Optional["Context"] = None - ) -> None: - super().__init__(message, ctx) - self.option_name = option_name - - -class BadArgumentUsage(UsageError): - """Raised if an argument is generally supplied but the use of the argument - was incorrect. This is for instance raised if the number of values - for an argument is not correct. - - .. versionadded:: 6.0 - """ - - -class FileError(ClickException): - """Raised if a file cannot be opened.""" - - def __init__(self, filename: str, hint: t.Optional[str] = None) -> None: - if hint is None: - hint = _("unknown error") - - super().__init__(hint) - self.ui_filename = os.fsdecode(filename) - self.filename = filename - - def format_message(self) -> str: - return _("Could not open file {filename!r}: {message}").format( - filename=self.ui_filename, message=self.message - ) - - -class Abort(RuntimeError): - """An internal signalling exception that signals Click to abort.""" - - -class Exit(RuntimeError): - """An exception that indicates that the application should exit with some - status code. - - :param code: the status code to exit with. - """ - - __slots__ = ("exit_code",) - - def __init__(self, code: int = 0) -> None: - self.exit_code = code diff --git a/venv/lib/python3.8/site-packages/click/formatting.py b/venv/lib/python3.8/site-packages/click/formatting.py deleted file mode 100644 index ddd2a2f..0000000 --- a/venv/lib/python3.8/site-packages/click/formatting.py +++ /dev/null @@ -1,301 +0,0 @@ -import typing as t -from contextlib import contextmanager -from gettext import gettext as _ - -from ._compat import term_len -from .parser import split_opt - -# Can force a width. This is used by the test system -FORCED_WIDTH: t.Optional[int] = None - - -def measure_table(rows: t.Iterable[t.Tuple[str, str]]) -> t.Tuple[int, ...]: - widths: t.Dict[int, int] = {} - - for row in rows: - for idx, col in enumerate(row): - widths[idx] = max(widths.get(idx, 0), term_len(col)) - - return tuple(y for x, y in sorted(widths.items())) - - -def iter_rows( - rows: t.Iterable[t.Tuple[str, str]], col_count: int -) -> t.Iterator[t.Tuple[str, ...]]: - for row in rows: - yield row + ("",) * (col_count - len(row)) - - -def wrap_text( - text: str, - width: int = 78, - initial_indent: str = "", - subsequent_indent: str = "", - preserve_paragraphs: bool = False, -) -> str: - """A helper function that intelligently wraps text. 
By default, it - assumes that it operates on a single paragraph of text but if the - `preserve_paragraphs` parameter is provided it will intelligently - handle paragraphs (defined by two empty lines). - - If paragraphs are handled, a paragraph can be prefixed with an empty - line containing the ``\\b`` character (``\\x08``) to indicate that - no rewrapping should happen in that block. - - :param text: the text that should be rewrapped. - :param width: the maximum width for the text. - :param initial_indent: the initial indent that should be placed on the - first line as a string. - :param subsequent_indent: the indent string that should be placed on - each consecutive line. - :param preserve_paragraphs: if this flag is set then the wrapping will - intelligently handle paragraphs. - """ - from ._textwrap import TextWrapper - - text = text.expandtabs() - wrapper = TextWrapper( - width, - initial_indent=initial_indent, - subsequent_indent=subsequent_indent, - replace_whitespace=False, - ) - if not preserve_paragraphs: - return wrapper.fill(text) - - p: t.List[t.Tuple[int, bool, str]] = [] - buf: t.List[str] = [] - indent = None - - def _flush_par() -> None: - if not buf: - return - if buf[0].strip() == "\b": - p.append((indent or 0, True, "\n".join(buf[1:]))) - else: - p.append((indent or 0, False, " ".join(buf))) - del buf[:] - - for line in text.splitlines(): - if not line: - _flush_par() - indent = None - else: - if indent is None: - orig_len = term_len(line) - line = line.lstrip() - indent = orig_len - term_len(line) - buf.append(line) - _flush_par() - - rv = [] - for indent, raw, text in p: - with wrapper.extra_indent(" " * indent): - if raw: - rv.append(wrapper.indent_only(text)) - else: - rv.append(wrapper.fill(text)) - - return "\n\n".join(rv) - - -class HelpFormatter: - """This class helps with formatting text-based help pages. It's - usually just needed for very special internal cases, but it's also - exposed so that developers can write their own fancy outputs. - - At present, it always writes into memory. - - :param indent_increment: the additional increment for each level. - :param width: the width for the text. This defaults to the terminal - width clamped to a maximum of 78. - """ - - def __init__( - self, - indent_increment: int = 2, - width: t.Optional[int] = None, - max_width: t.Optional[int] = None, - ) -> None: - import shutil - - self.indent_increment = indent_increment - if max_width is None: - max_width = 80 - if width is None: - width = FORCED_WIDTH - if width is None: - width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) - self.width = width - self.current_indent = 0 - self.buffer: t.List[str] = [] - - def write(self, string: str) -> None: - """Writes a unicode string into the internal buffer.""" - self.buffer.append(string) - - def indent(self) -> None: - """Increases the indentation.""" - self.current_indent += self.indent_increment - - def dedent(self) -> None: - """Decreases the indentation.""" - self.current_indent -= self.indent_increment - - def write_usage( - self, prog: str, args: str = "", prefix: t.Optional[str] = None - ) -> None: - """Writes a usage line into the buffer. - - :param prog: the program name. - :param args: whitespace separated list of arguments. - :param prefix: The prefix for the first line. Defaults to - ``"Usage: "``. 
- """ - if prefix is None: - prefix = f"{_('Usage:')} " - - usage_prefix = f"{prefix:>{self.current_indent}}{prog} " - text_width = self.width - self.current_indent - - if text_width >= (term_len(usage_prefix) + 20): - # The arguments will fit to the right of the prefix. - indent = " " * term_len(usage_prefix) - self.write( - wrap_text( - args, - text_width, - initial_indent=usage_prefix, - subsequent_indent=indent, - ) - ) - else: - # The prefix is too long, put the arguments on the next line. - self.write(usage_prefix) - self.write("\n") - indent = " " * (max(self.current_indent, term_len(prefix)) + 4) - self.write( - wrap_text( - args, text_width, initial_indent=indent, subsequent_indent=indent - ) - ) - - self.write("\n") - - def write_heading(self, heading: str) -> None: - """Writes a heading into the buffer.""" - self.write(f"{'':>{self.current_indent}}{heading}:\n") - - def write_paragraph(self) -> None: - """Writes a paragraph into the buffer.""" - if self.buffer: - self.write("\n") - - def write_text(self, text: str) -> None: - """Writes re-indented text into the buffer. This rewraps and - preserves paragraphs. - """ - indent = " " * self.current_indent - self.write( - wrap_text( - text, - self.width, - initial_indent=indent, - subsequent_indent=indent, - preserve_paragraphs=True, - ) - ) - self.write("\n") - - def write_dl( - self, - rows: t.Sequence[t.Tuple[str, str]], - col_max: int = 30, - col_spacing: int = 2, - ) -> None: - """Writes a definition list into the buffer. This is how options - and commands are usually formatted. - - :param rows: a list of two item tuples for the terms and values. - :param col_max: the maximum width of the first column. - :param col_spacing: the number of spaces between the first and - second column. - """ - rows = list(rows) - widths = measure_table(rows) - if len(widths) != 2: - raise TypeError("Expected two columns for definition list") - - first_col = min(widths[0], col_max) + col_spacing - - for first, second in iter_rows(rows, len(widths)): - self.write(f"{'':>{self.current_indent}}{first}") - if not second: - self.write("\n") - continue - if term_len(first) <= first_col - col_spacing: - self.write(" " * (first_col - term_len(first))) - else: - self.write("\n") - self.write(" " * (first_col + self.current_indent)) - - text_width = max(self.width - first_col - 2, 10) - wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) - lines = wrapped_text.splitlines() - - if lines: - self.write(f"{lines[0]}\n") - - for line in lines[1:]: - self.write(f"{'':>{first_col + self.current_indent}}{line}\n") - else: - self.write("\n") - - @contextmanager - def section(self, name: str) -> t.Iterator[None]: - """Helpful context manager that writes a paragraph, a heading, - and the indents. - - :param name: the section name that is written as heading. 
- """ - self.write_paragraph() - self.write_heading(name) - self.indent() - try: - yield - finally: - self.dedent() - - @contextmanager - def indentation(self) -> t.Iterator[None]: - """A context manager that increases the indentation.""" - self.indent() - try: - yield - finally: - self.dedent() - - def getvalue(self) -> str: - """Returns the buffer contents.""" - return "".join(self.buffer) - - -def join_options(options: t.Sequence[str]) -> t.Tuple[str, bool]: - """Given a list of option strings this joins them in the most appropriate - way and returns them in the form ``(formatted_string, - any_prefix_is_slash)`` where the second item in the tuple is a flag that - indicates if any of the option prefixes was a slash. - """ - rv = [] - any_prefix_is_slash = False - - for opt in options: - prefix = split_opt(opt)[0] - - if prefix == "/": - any_prefix_is_slash = True - - rv.append((len(prefix), opt)) - - rv.sort(key=lambda x: x[0]) - return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/venv/lib/python3.8/site-packages/click/globals.py b/venv/lib/python3.8/site-packages/click/globals.py deleted file mode 100644 index a7b0c93..0000000 --- a/venv/lib/python3.8/site-packages/click/globals.py +++ /dev/null @@ -1,69 +0,0 @@ -import typing -import typing as t -from threading import local - -if t.TYPE_CHECKING: - import typing_extensions as te - from .core import Context - -_local = local() - - -@typing.overload -def get_current_context(silent: "te.Literal[False]" = False) -> "Context": - ... - - -@typing.overload -def get_current_context(silent: bool = ...) -> t.Optional["Context"]: - ... - - -def get_current_context(silent: bool = False) -> t.Optional["Context"]: - """Returns the current click context. This can be used as a way to - access the current context object from anywhere. This is a more implicit - alternative to the :func:`pass_context` decorator. This function is - primarily useful for helpers such as :func:`echo` which might be - interested in changing its behavior based on the current context. - - To push the current context, :meth:`Context.scope` can be used. - - .. versionadded:: 5.0 - - :param silent: if set to `True` the return value is `None` if no context - is available. The default behavior is to raise a - :exc:`RuntimeError`. - """ - try: - return t.cast("Context", _local.stack[-1]) - except (AttributeError, IndexError) as e: - if not silent: - raise RuntimeError("There is no active click context.") from e - - return None - - -def push_context(ctx: "Context") -> None: - """Pushes a new context to the current stack.""" - _local.__dict__.setdefault("stack", []).append(ctx) - - -def pop_context() -> None: - """Removes the top level from the stack.""" - _local.stack.pop() - - -def resolve_color_default(color: t.Optional[bool] = None) -> t.Optional[bool]: - """Internal helper to get the default value of the color flag. If a - value is passed it's returned unchanged, otherwise it's looked up from - the current context. 
- """ - if color is not None: - return color - - ctx = get_current_context(silent=True) - - if ctx is not None: - return ctx.color - - return None diff --git a/venv/lib/python3.8/site-packages/click/parser.py b/venv/lib/python3.8/site-packages/click/parser.py deleted file mode 100644 index 2d5a2ed..0000000 --- a/venv/lib/python3.8/site-packages/click/parser.py +++ /dev/null @@ -1,529 +0,0 @@ -""" -This module started out as largely a copy paste from the stdlib's -optparse module with the features removed that we do not need from -optparse because we implement them in Click on a higher level (for -instance type handling, help formatting and a lot more). - -The plan is to remove more and more from here over time. - -The reason this is a different module and not optparse from the stdlib -is that there are differences in 2.x and 3.x about the error messages -generated and optparse in the stdlib uses gettext for no good reason -and might cause us issues. - -Click uses parts of optparse written by Gregory P. Ward and maintained -by the Python Software Foundation. This is limited to code in parser.py. - -Copyright 2001-2006 Gregory P. Ward. All rights reserved. -Copyright 2002-2006 Python Software Foundation. All rights reserved. -""" -# This code uses parts of optparse written by Gregory P. Ward and -# maintained by the Python Software Foundation. -# Copyright 2001-2006 Gregory P. Ward -# Copyright 2002-2006 Python Software Foundation -import typing as t -from collections import deque -from gettext import gettext as _ -from gettext import ngettext - -from .exceptions import BadArgumentUsage -from .exceptions import BadOptionUsage -from .exceptions import NoSuchOption -from .exceptions import UsageError - -if t.TYPE_CHECKING: - import typing_extensions as te - from .core import Argument as CoreArgument - from .core import Context - from .core import Option as CoreOption - from .core import Parameter as CoreParameter - -V = t.TypeVar("V") - -# Sentinel value that indicates an option was passed as a flag without a -# value but is not a flag option. Option.consume_value uses this to -# prompt or use the flag_value. -_flag_needs_value = object() - - -def _unpack_args( - args: t.Sequence[str], nargs_spec: t.Sequence[int] -) -> t.Tuple[t.Sequence[t.Union[str, t.Sequence[t.Optional[str]], None]], t.List[str]]: - """Given an iterable of arguments and an iterable of nargs specifications, - it returns a tuple with all the unpacked arguments at the first index - and all remaining arguments as the second. - - The nargs specification is the number of arguments that should be consumed - or `-1` to indicate that this position should eat up all the remainders. - - Missing items are filled with `None`. - """ - args = deque(args) - nargs_spec = deque(nargs_spec) - rv: t.List[t.Union[str, t.Tuple[t.Optional[str], ...], None]] = [] - spos: t.Optional[int] = None - - def _fetch(c: "te.Deque[V]") -> t.Optional[V]: - try: - if spos is None: - return c.popleft() - else: - return c.pop() - except IndexError: - return None - - while nargs_spec: - nargs = _fetch(nargs_spec) - - if nargs is None: - continue - - if nargs == 1: - rv.append(_fetch(args)) - elif nargs > 1: - x = [_fetch(args) for _ in range(nargs)] - - # If we're reversed, we're pulling in the arguments in reverse, - # so we need to turn them around. 
- if spos is not None: - x.reverse() - - rv.append(tuple(x)) - elif nargs < 0: - if spos is not None: - raise TypeError("Cannot have two nargs < 0") - - spos = len(rv) - rv.append(None) - - # spos is the position of the wildcard (star). If it's not `None`, - # we fill it with the remainder. - if spos is not None: - rv[spos] = tuple(args) - args = [] - rv[spos + 1 :] = reversed(rv[spos + 1 :]) - - return tuple(rv), list(args) - - -def split_opt(opt: str) -> t.Tuple[str, str]: - first = opt[:1] - if first.isalnum(): - return "", opt - if opt[1:2] == first: - return opt[:2], opt[2:] - return first, opt[1:] - - -def normalize_opt(opt: str, ctx: t.Optional["Context"]) -> str: - if ctx is None or ctx.token_normalize_func is None: - return opt - prefix, opt = split_opt(opt) - return f"{prefix}{ctx.token_normalize_func(opt)}" - - -def split_arg_string(string: str) -> t.List[str]: - """Split an argument string as with :func:`shlex.split`, but don't - fail if the string is incomplete. Ignores a missing closing quote or - incomplete escape sequence and uses the partial token as-is. - - .. code-block:: python - - split_arg_string("example 'my file") - ["example", "my file"] - - split_arg_string("example my\\") - ["example", "my"] - - :param string: String to split. - """ - import shlex - - lex = shlex.shlex(string, posix=True) - lex.whitespace_split = True - lex.commenters = "" - out = [] - - try: - for token in lex: - out.append(token) - except ValueError: - # Raised when end-of-string is reached in an invalid state. Use - # the partial token as-is. The quote or escape character is in - # lex.state, not lex.token. - out.append(lex.token) - - return out - - -class Option: - def __init__( - self, - obj: "CoreOption", - opts: t.Sequence[str], - dest: t.Optional[str], - action: t.Optional[str] = None, - nargs: int = 1, - const: t.Optional[t.Any] = None, - ): - self._short_opts = [] - self._long_opts = [] - self.prefixes = set() - - for opt in opts: - prefix, value = split_opt(opt) - if not prefix: - raise ValueError(f"Invalid start character for option ({opt})") - self.prefixes.add(prefix[0]) - if len(prefix) == 1 and len(value) == 1: - self._short_opts.append(opt) - else: - self._long_opts.append(opt) - self.prefixes.add(prefix) - - if action is None: - action = "store" - - self.dest = dest - self.action = action - self.nargs = nargs - self.const = const - self.obj = obj - - @property - def takes_value(self) -> bool: - return self.action in ("store", "append") - - def process(self, value: str, state: "ParsingState") -> None: - if self.action == "store": - state.opts[self.dest] = value # type: ignore - elif self.action == "store_const": - state.opts[self.dest] = self.const # type: ignore - elif self.action == "append": - state.opts.setdefault(self.dest, []).append(value) # type: ignore - elif self.action == "append_const": - state.opts.setdefault(self.dest, []).append(self.const) # type: ignore - elif self.action == "count": - state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore - else: - raise ValueError(f"unknown action '{self.action}'") - state.order.append(self.obj) - - -class Argument: - def __init__(self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1): - self.dest = dest - self.nargs = nargs - self.obj = obj - - def process( - self, - value: t.Union[t.Optional[str], t.Sequence[t.Optional[str]]], - state: "ParsingState", - ) -> None: - if self.nargs > 1: - assert value is not None - holes = sum(1 for x in value if x is None) - if holes == len(value): - value = None - 
elif holes != 0: - raise BadArgumentUsage( - _("Argument {name!r} takes {nargs} values.").format( - name=self.dest, nargs=self.nargs - ) - ) - - if self.nargs == -1 and self.obj.envvar is not None and value == (): - # Replace empty tuple with None so that a value from the - # environment may be tried. - value = None - - state.opts[self.dest] = value # type: ignore - state.order.append(self.obj) - - -class ParsingState: - def __init__(self, rargs: t.List[str]) -> None: - self.opts: t.Dict[str, t.Any] = {} - self.largs: t.List[str] = [] - self.rargs = rargs - self.order: t.List["CoreParameter"] = [] - - -class OptionParser: - """The option parser is an internal class that is ultimately used to - parse options and arguments. It's modelled after optparse and brings - a similar but vastly simplified API. It should generally not be used - directly as the high level Click classes wrap it for you. - - It's not nearly as extensible as optparse or argparse as it does not - implement features that are implemented on a higher level (such as - types or defaults). - - :param ctx: optionally the :class:`~click.Context` where this parser - should go with. - """ - - def __init__(self, ctx: t.Optional["Context"] = None) -> None: - #: The :class:`~click.Context` for this parser. This might be - #: `None` for some advanced use cases. - self.ctx = ctx - #: This controls how the parser deals with interspersed arguments. - #: If this is set to `False`, the parser will stop on the first - #: non-option. Click uses this to implement nested subcommands - #: safely. - self.allow_interspersed_args = True - #: This tells the parser how to deal with unknown options. By - #: default it will error out (which is sensible), but there is a - #: second mode where it will ignore it and continue processing - #: after shifting all the unknown options into the resulting args. - self.ignore_unknown_options = False - - if ctx is not None: - self.allow_interspersed_args = ctx.allow_interspersed_args - self.ignore_unknown_options = ctx.ignore_unknown_options - - self._short_opt: t.Dict[str, Option] = {} - self._long_opt: t.Dict[str, Option] = {} - self._opt_prefixes = {"-", "--"} - self._args: t.List[Argument] = [] - - def add_option( - self, - obj: "CoreOption", - opts: t.Sequence[str], - dest: t.Optional[str], - action: t.Optional[str] = None, - nargs: int = 1, - const: t.Optional[t.Any] = None, - ) -> None: - """Adds a new option named `dest` to the parser. The destination - is not inferred (unlike with optparse) and needs to be explicitly - provided. Action can be any of ``store``, ``store_const``, - ``append``, ``append_const`` or ``count``. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - opts = [normalize_opt(opt, self.ctx) for opt in opts] - option = Option(obj, opts, dest, action=action, nargs=nargs, const=const) - self._opt_prefixes.update(option.prefixes) - for opt in option._short_opts: - self._short_opt[opt] = option - for opt in option._long_opts: - self._long_opt[opt] = option - - def add_argument( - self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1 - ) -> None: - """Adds a positional argument named `dest` to the parser. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. 
- """ - self._args.append(Argument(obj, dest=dest, nargs=nargs)) - - def parse_args( - self, args: t.List[str] - ) -> t.Tuple[t.Dict[str, t.Any], t.List[str], t.List["CoreParameter"]]: - """Parses positional arguments and returns ``(values, args, order)`` - for the parsed options and arguments as well as the leftover - arguments if there are any. The order is a list of objects as they - appear on the command line. If arguments appear multiple times they - will be memorized multiple times as well. - """ - state = ParsingState(args) - try: - self._process_args_for_options(state) - self._process_args_for_args(state) - except UsageError: - if self.ctx is None or not self.ctx.resilient_parsing: - raise - return state.opts, state.largs, state.order - - def _process_args_for_args(self, state: ParsingState) -> None: - pargs, args = _unpack_args( - state.largs + state.rargs, [x.nargs for x in self._args] - ) - - for idx, arg in enumerate(self._args): - arg.process(pargs[idx], state) - - state.largs = args - state.rargs = [] - - def _process_args_for_options(self, state: ParsingState) -> None: - while state.rargs: - arg = state.rargs.pop(0) - arglen = len(arg) - # Double dashes always handled explicitly regardless of what - # prefixes are valid. - if arg == "--": - return - elif arg[:1] in self._opt_prefixes and arglen > 1: - self._process_opts(arg, state) - elif self.allow_interspersed_args: - state.largs.append(arg) - else: - state.rargs.insert(0, arg) - return - - # Say this is the original argument list: - # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] - # ^ - # (we are about to process arg(i)). - # - # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of - # [arg0, ..., arg(i-1)] (any options and their arguments will have - # been removed from largs). - # - # The while loop will usually consume 1 or more arguments per pass. - # If it consumes 1 (eg. arg is an option that takes no arguments), - # then after _process_arg() is done the situation is: - # - # largs = subset of [arg0, ..., arg(i)] - # rargs = [arg(i+1), ..., arg(N-1)] - # - # If allow_interspersed_args is false, largs will always be - # *empty* -- still a subset of [arg0, ..., arg(i-1)], but - # not a very interesting subset! - - def _match_long_opt( - self, opt: str, explicit_value: t.Optional[str], state: ParsingState - ) -> None: - if opt not in self._long_opt: - from difflib import get_close_matches - - possibilities = get_close_matches(opt, self._long_opt) - raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) - - option = self._long_opt[opt] - if option.takes_value: - # At this point it's safe to modify rargs by injecting the - # explicit value, because no exception is raised in this - # branch. This means that the inserted value will be fully - # consumed. 
- if explicit_value is not None: - state.rargs.insert(0, explicit_value) - - value = self._get_value_from_state(opt, option, state) - - elif explicit_value is not None: - raise BadOptionUsage( - opt, _("Option {name!r} does not take a value.").format(name=opt) - ) - - else: - value = None - - option.process(value, state) - - def _match_short_opt(self, arg: str, state: ParsingState) -> None: - stop = False - i = 1 - prefix = arg[0] - unknown_options = [] - - for ch in arg[1:]: - opt = normalize_opt(f"{prefix}{ch}", self.ctx) - option = self._short_opt.get(opt) - i += 1 - - if not option: - if self.ignore_unknown_options: - unknown_options.append(ch) - continue - raise NoSuchOption(opt, ctx=self.ctx) - if option.takes_value: - # Any characters left in arg? Pretend they're the - # next arg, and stop consuming characters of arg. - if i < len(arg): - state.rargs.insert(0, arg[i:]) - stop = True - - value = self._get_value_from_state(opt, option, state) - - else: - value = None - - option.process(value, state) - - if stop: - break - - # If we got any unknown options we re-combinate the string of the - # remaining options and re-attach the prefix, then report that - # to the state as new larg. This way there is basic combinatorics - # that can be achieved while still ignoring unknown arguments. - if self.ignore_unknown_options and unknown_options: - state.largs.append(f"{prefix}{''.join(unknown_options)}") - - def _get_value_from_state( - self, option_name: str, option: Option, state: ParsingState - ) -> t.Any: - nargs = option.nargs - - if len(state.rargs) < nargs: - if option.obj._flag_needs_value: - # Option allows omitting the value. - value = _flag_needs_value - else: - raise BadOptionUsage( - option_name, - ngettext( - "Option {name!r} requires an argument.", - "Option {name!r} requires {nargs} arguments.", - nargs, - ).format(name=option_name, nargs=nargs), - ) - elif nargs == 1: - next_rarg = state.rargs[0] - - if ( - option.obj._flag_needs_value - and isinstance(next_rarg, str) - and next_rarg[:1] in self._opt_prefixes - and len(next_rarg) > 1 - ): - # The next arg looks like the start of an option, don't - # use it as the value if omitting the value is allowed. - value = _flag_needs_value - else: - value = state.rargs.pop(0) - else: - value = tuple(state.rargs[:nargs]) - del state.rargs[:nargs] - - return value - - def _process_opts(self, arg: str, state: ParsingState) -> None: - explicit_value = None - # Long option handling happens in two parts. The first part is - # supporting explicitly attached values. In any case, we will try - # to long match the option first. - if "=" in arg: - long_opt, explicit_value = arg.split("=", 1) - else: - long_opt = arg - norm_long_opt = normalize_opt(long_opt, self.ctx) - - # At this point we will match the (assumed) long option through - # the long option matching code. Note that this allows options - # like "-foo" to be matched as long options. - try: - self._match_long_opt(norm_long_opt, explicit_value, state) - except NoSuchOption: - # At this point the long option matching failed, and we need - # to try with short options. However there is a special rule - # which says, that if we have a two character options prefix - # (applies to "--foo" for instance), we do not dispatch to the - # short option code and will instead raise the no option - # error. 
- if arg[:2] not in self._opt_prefixes: - self._match_short_opt(arg, state) - return - - if not self.ignore_unknown_options: - raise - - state.largs.append(arg) diff --git a/venv/lib/python3.8/site-packages/click/py.typed b/venv/lib/python3.8/site-packages/click/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.8/site-packages/click/shell_completion.py b/venv/lib/python3.8/site-packages/click/shell_completion.py deleted file mode 100644 index cad080d..0000000 --- a/venv/lib/python3.8/site-packages/click/shell_completion.py +++ /dev/null @@ -1,581 +0,0 @@ -import os -import re -import typing as t -from gettext import gettext as _ - -from .core import Argument -from .core import BaseCommand -from .core import Context -from .core import MultiCommand -from .core import Option -from .core import Parameter -from .core import ParameterSource -from .parser import split_arg_string -from .utils import echo - - -def shell_complete( - cli: BaseCommand, - ctx_args: t.Dict[str, t.Any], - prog_name: str, - complete_var: str, - instruction: str, -) -> int: - """Perform shell completion for the given CLI program. - - :param cli: Command being called. - :param ctx_args: Extra arguments to pass to - ``cli.make_context``. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - :param instruction: Value of ``complete_var`` with the completion - instruction and shell, in the form ``instruction_shell``. - :return: Status code to exit with. - """ - shell, _, instruction = instruction.partition("_") - comp_cls = get_completion_class(shell) - - if comp_cls is None: - return 1 - - comp = comp_cls(cli, ctx_args, prog_name, complete_var) - - if instruction == "source": - echo(comp.source()) - return 0 - - if instruction == "complete": - echo(comp.complete()) - return 0 - - return 1 - - -class CompletionItem: - """Represents a completion value and metadata about the value. The - default metadata is ``type`` to indicate special shell handling, - and ``help`` if a shell supports showing a help string next to the - value. - - Arbitrary parameters can be passed when creating the object, and - accessed using ``item.attr``. If an attribute wasn't passed, - accessing it returns ``None``. - - :param value: The completion suggestion. - :param type: Tells the shell script to provide special completion - support for the type. Click uses ``"dir"`` and ``"file"``. - :param help: String shown next to the value if supported. - :param kwargs: Arbitrary metadata. The built-in implementations - don't use this, but custom type completions paired with custom - shell support could use it. - """ - - __slots__ = ("value", "type", "help", "_info") - - def __init__( - self, - value: t.Any, - type: str = "plain", - help: t.Optional[str] = None, - **kwargs: t.Any, - ) -> None: - self.value = value - self.type = type - self.help = help - self._info = kwargs - - def __getattr__(self, name: str) -> t.Any: - return self._info.get(name) - - -# Only Bash >= 4.4 has the nosort option. 
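
For context, the `shell_complete` entry point and `CompletionItem` defined above are what custom completions ultimately flow through; a minimal sketch of attaching a completion callback to an option (the `deploy` command, `--env` option, and environment names are invented for illustration):

    import click
    from click.shell_completion import CompletionItem

    def complete_env(ctx, param, incomplete):
        # Return CompletionItem objects (plain strings also work) that match the
        # partially typed value; help text is shown by shells that support it.
        choices = [("dev", "local sandbox"), ("staging", "pre-production"), ("prod", "live site")]
        return [CompletionItem(value, help=desc) for value, desc in choices if value.startswith(incomplete)]

    @click.command()
    @click.option("--env", shell_complete=complete_env)
    def deploy(env):
        click.echo(f"deploying to {env}")

Completion is then activated through the `_<PROG>_COMPLETE` variable handled by `shell_complete()` above, e.g. `eval "$(_DEPLOY_COMPLETE=bash_source deploy)"` for Bash in Click 8.0, assuming the command is installed as a `deploy` console script.
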
-_SOURCE_BASH = """\ -%(complete_func)s() { - local IFS=$'\\n' - local response - - response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ -%(complete_var)s=bash_complete $1) - - for completion in $response; do - IFS=',' read type value <<< "$completion" - - if [[ $type == 'dir' ]]; then - COMREPLY=() - compopt -o dirnames - elif [[ $type == 'file' ]]; then - COMREPLY=() - compopt -o default - elif [[ $type == 'plain' ]]; then - COMPREPLY+=($value) - fi - done - - return 0 -} - -%(complete_func)s_setup() { - complete -o nosort -F %(complete_func)s %(prog_name)s -} - -%(complete_func)s_setup; -""" - -_SOURCE_ZSH = """\ -#compdef %(prog_name)s - -%(complete_func)s() { - local -a completions - local -a completions_with_descriptions - local -a response - (( ! $+commands[%(prog_name)s] )) && return 1 - - response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ -%(complete_var)s=zsh_complete %(prog_name)s)}") - - for type key descr in ${response}; do - if [[ "$type" == "plain" ]]; then - if [[ "$descr" == "_" ]]; then - completions+=("$key") - else - completions_with_descriptions+=("$key":"$descr") - fi - elif [[ "$type" == "dir" ]]; then - _path_files -/ - elif [[ "$type" == "file" ]]; then - _path_files -f - fi - done - - if [ -n "$completions_with_descriptions" ]; then - _describe -V unsorted completions_with_descriptions -U - fi - - if [ -n "$completions" ]; then - compadd -U -V unsorted -a completions - fi -} - -compdef %(complete_func)s %(prog_name)s; -""" - -_SOURCE_FISH = """\ -function %(complete_func)s; - set -l response; - - for value in (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ -COMP_CWORD=(commandline -t) %(prog_name)s); - set response $response $value; - end; - - for completion in $response; - set -l metadata (string split "," $completion); - - if test $metadata[1] = "dir"; - __fish_complete_directories $metadata[2]; - else if test $metadata[1] = "file"; - __fish_complete_path $metadata[2]; - else if test $metadata[1] = "plain"; - echo $metadata[2]; - end; - end; -end; - -complete --no-files --command %(prog_name)s --arguments \ -"(%(complete_func)s)"; -""" - - -class ShellComplete: - """Base class for providing shell completion support. A subclass for - a given shell will override attributes and methods to implement the - completion instructions (``source`` and ``complete``). - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - - .. versionadded:: 8.0 - """ - - name: t.ClassVar[str] - """Name to register the shell as with :func:`add_completion_class`. - This is used in completion instructions (``{name}_source`` and - ``{name}_complete``). - """ - - source_template: t.ClassVar[str] - """Completion script template formatted by :meth:`source`. This must - be provided by subclasses. - """ - - def __init__( - self, - cli: BaseCommand, - ctx_args: t.Dict[str, t.Any], - prog_name: str, - complete_var: str, - ) -> None: - self.cli = cli - self.ctx_args = ctx_args - self.prog_name = prog_name - self.complete_var = complete_var - - @property - def func_name(self) -> str: - """The name of the shell function defined by the completion - script. - """ - safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), re.ASCII) - return f"_{safe_name}_completion" - - def source_vars(self) -> t.Dict[str, t.Any]: - """Vars for formatting :attr:`source_template`. 
- - By default this provides ``complete_func``, ``complete_var``, - and ``prog_name``. - """ - return { - "complete_func": self.func_name, - "complete_var": self.complete_var, - "prog_name": self.prog_name, - } - - def source(self) -> str: - """Produce the shell script that defines the completion - function. By default this ``%``-style formats - :attr:`source_template` with the dict returned by - :meth:`source_vars`. - """ - return self.source_template % self.source_vars() - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - """Use the env vars defined by the shell script to return a - tuple of ``args, incomplete``. This must be implemented by - subclasses. - """ - raise NotImplementedError - - def get_completions( - self, args: t.List[str], incomplete: str - ) -> t.List[CompletionItem]: - """Determine the context and last complete command or parameter - from the complete args. Call that object's ``shell_complete`` - method to get the completions for the incomplete value. - - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) - obj, incomplete = _resolve_incomplete(ctx, args, incomplete) - return obj.shell_complete(ctx, incomplete) - - def format_completion(self, item: CompletionItem) -> str: - """Format a completion item into the form recognized by the - shell script. This must be implemented by subclasses. - - :param item: Completion item to format. - """ - raise NotImplementedError - - def complete(self) -> str: - """Produce the completion data to send back to the shell. - - By default this calls :meth:`get_completion_args`, gets the - completions, then calls :meth:`format_completion` for each - completion. - """ - args, incomplete = self.get_completion_args() - completions = self.get_completions(args, incomplete) - out = [self.format_completion(item) for item in completions] - return "\n".join(out) - - -class BashComplete(ShellComplete): - """Shell completion for Bash.""" - - name = "bash" - source_template = _SOURCE_BASH - - def _check_version(self) -> None: - import subprocess - - output = subprocess.run( - ["bash", "-c", "echo ${BASH_VERSION}"], stdout=subprocess.PIPE - ) - match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) - - if match is not None: - major, minor = match.groups() - - if major < "4" or major == "4" and minor < "4": - raise RuntimeError( - _( - "Shell completion is not supported for Bash" - " versions older than 4.4." 
- ) - ) - else: - raise RuntimeError( - _("Couldn't detect Bash version, shell completion is not supported.") - ) - - def source(self) -> str: - self._check_version() - return super().source() - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - return f"{item.type},{item.value}" - - -class ZshComplete(ShellComplete): - """Shell completion for Zsh.""" - - name = "zsh" - source_template = _SOURCE_ZSH - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" - - -class FishComplete(ShellComplete): - """Shell completion for Fish.""" - - name = "fish" - source_template = _SOURCE_FISH - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - incomplete = os.environ["COMP_CWORD"] - args = cwords[1:] - - # Fish stores the partial word in both COMP_WORDS and - # COMP_CWORD, remove it from complete args. - if incomplete and args and args[-1] == incomplete: - args.pop() - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - if item.help: - return f"{item.type},{item.value}\t{item.help}" - - return f"{item.type},{item.value}" - - -_available_shells: t.Dict[str, t.Type[ShellComplete]] = { - "bash": BashComplete, - "fish": FishComplete, - "zsh": ZshComplete, -} - - -def add_completion_class( - cls: t.Type[ShellComplete], name: t.Optional[str] = None -) -> None: - """Register a :class:`ShellComplete` subclass under the given name. - The name will be provided by the completion instruction environment - variable during completion. - - :param cls: The completion class that will handle completion for the - shell. - :param name: Name to register the class under. Defaults to the - class's ``name`` attribute. - """ - if name is None: - name = cls.name - - _available_shells[name] = cls - - -def get_completion_class(shell: str) -> t.Optional[t.Type[ShellComplete]]: - """Look up a registered :class:`ShellComplete` subclass by the name - provided by the completion instruction environment variable. If the - name isn't registered, returns ``None``. - - :param shell: Name the class is registered under. - """ - return _available_shells.get(shell) - - -def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: - """Determine if the given parameter is an argument that can still - accept values. - - :param ctx: Invocation context for the command represented by the - parsed complete args. - :param param: Argument object being checked. 
- """ - if not isinstance(param, Argument): - return False - - assert param.name is not None - value = ctx.params[param.name] - return ( - param.nargs == -1 - or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE - or ( - param.nargs > 1 - and isinstance(value, (tuple, list)) - and len(value) < param.nargs - ) - ) - - -def _start_of_option(value: str) -> bool: - """Check if the value looks like the start of an option.""" - if not value: - return False - - c = value[0] - # Allow "/" since that starts a path. - return not c.isalnum() and c != "/" - - -def _is_incomplete_option(args: t.List[str], param: Parameter) -> bool: - """Determine if the given parameter is an option that needs a value. - - :param args: List of complete args before the incomplete value. - :param param: Option object being checked. - """ - if not isinstance(param, Option): - return False - - if param.is_flag: - return False - - last_option = None - - for index, arg in enumerate(reversed(args)): - if index + 1 > param.nargs: - break - - if _start_of_option(arg): - last_option = arg - - return last_option is not None and last_option in param.opts - - -def _resolve_context( - cli: BaseCommand, ctx_args: t.Dict[str, t.Any], prog_name: str, args: t.List[str] -) -> Context: - """Produce the context hierarchy starting with the command and - traversing the complete arguments. This only follows the commands, - it doesn't trigger input prompts or callbacks. - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param args: List of complete args before the incomplete value. - """ - ctx_args["resilient_parsing"] = True - ctx = cli.make_context(prog_name, args.copy(), **ctx_args) - args = ctx.protected_args + ctx.args - - while args: - command = ctx.command - - if isinstance(command, MultiCommand): - if not command.chain: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - ctx = cmd.make_context(name, args, parent=ctx, resilient_parsing=True) - args = ctx.protected_args + ctx.args - else: - while args: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - sub_ctx = cmd.make_context( - name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - resilient_parsing=True, - ) - args = sub_ctx.args - - ctx = sub_ctx - args = [*sub_ctx.protected_args, *sub_ctx.args] - else: - break - - return ctx - - -def _resolve_incomplete( - ctx: Context, args: t.List[str], incomplete: str -) -> t.Tuple[t.Union[BaseCommand, Parameter], str]: - """Find the Click object that will handle the completion of the - incomplete value. Return the object and the incomplete value. - - :param ctx: Invocation context for the command represented by - the parsed complete args. - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - # Different shells treat an "=" between a long option name and - # value differently. Might keep the value joined, return the "=" - # as a separate item, or return the split name and value. Always - # split and discard the "=" to make completion easier. - if incomplete == "=": - incomplete = "" - elif "=" in incomplete and _start_of_option(incomplete): - name, _, incomplete = incomplete.partition("=") - args.append(name) - - # The "--" marker tells Click to stop treating values as options - # even if they start with the option character. 
If it hasn't been - # given and the incomplete arg looks like an option, the current - # command will provide option name completions. - if "--" not in args and _start_of_option(incomplete): - return ctx.command, incomplete - - params = ctx.command.get_params(ctx) - - # If the last complete arg is an option name with an incomplete - # value, the option will provide value completions. - for param in params: - if _is_incomplete_option(args, param): - return param, incomplete - - # It's not an option name or value. The first argument without a - # parsed value will provide value completions. - for param in params: - if _is_incomplete_argument(ctx, param): - return param, incomplete - - # There were no unparsed arguments, the command may be a group that - # will provide command name completions. - return ctx.command, incomplete diff --git a/venv/lib/python3.8/site-packages/click/termui.py b/venv/lib/python3.8/site-packages/click/termui.py deleted file mode 100644 index cf8d5f1..0000000 --- a/venv/lib/python3.8/site-packages/click/termui.py +++ /dev/null @@ -1,809 +0,0 @@ -import inspect -import io -import itertools -import os -import sys -import typing -import typing as t -from gettext import gettext as _ - -from ._compat import isatty -from ._compat import strip_ansi -from ._compat import WIN -from .exceptions import Abort -from .exceptions import UsageError -from .globals import resolve_color_default -from .types import Choice -from .types import convert_type -from .types import ParamType -from .utils import echo -from .utils import LazyFile - -if t.TYPE_CHECKING: - from ._termui_impl import ProgressBar - -V = t.TypeVar("V") - -# The prompt functions to use. The doc tools currently override these -# functions to customize how they work. -visible_prompt_func: t.Callable[[str], str] = input - -_ansi_colors = { - "black": 30, - "red": 31, - "green": 32, - "yellow": 33, - "blue": 34, - "magenta": 35, - "cyan": 36, - "white": 37, - "reset": 39, - "bright_black": 90, - "bright_red": 91, - "bright_green": 92, - "bright_yellow": 93, - "bright_blue": 94, - "bright_magenta": 95, - "bright_cyan": 96, - "bright_white": 97, -} -_ansi_reset_all = "\033[0m" - - -def hidden_prompt_func(prompt: str) -> str: - import getpass - - return getpass.getpass(prompt) - - -def _build_prompt( - text: str, - suffix: str, - show_default: bool = False, - default: t.Optional[t.Any] = None, - show_choices: bool = True, - type: t.Optional[ParamType] = None, -) -> str: - prompt = text - if type is not None and show_choices and isinstance(type, Choice): - prompt += f" ({', '.join(map(str, type.choices))})" - if default is not None and show_default: - prompt = f"{prompt} [{_format_default(default)}]" - return f"{prompt}{suffix}" - - -def _format_default(default: t.Any) -> t.Any: - if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): - return default.name # type: ignore - - return default - - -def prompt( - text: str, - default: t.Optional[t.Any] = None, - hide_input: bool = False, - confirmation_prompt: t.Union[bool, str] = False, - type: t.Optional[t.Union[ParamType, t.Any]] = None, - value_proc: t.Optional[t.Callable[[str], t.Any]] = None, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, - show_choices: bool = True, -) -> t.Any: - """Prompts a user for input. This is a convenience function that can - be used to prompt a user for input later. - - If the user aborts the input by sending a interrupt signal, this - function will catch it and raise a :exc:`Abort` exception. 
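
As a quick illustration of the interactive helpers defined in this module (`prompt` here and `confirm` further down), a minimal usage sketch; the prompt texts and defaults are invented for illustration:

    import click

    name = click.prompt("Project name", default="demo-app")    # free-form input with a default
    port = click.prompt("Port", type=int)                      # converted through the type system
    if click.confirm("Start the dev server?", default=True):   # yes/no question, raises Abort on Ctrl+C
        click.echo(f"Serving {name} on http://localhost:{port}")
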
- - :param text: the text to show for the prompt. - :param default: the default value to use if no input happens. If this - is not given it will prompt until it's aborted. - :param hide_input: if this is set to true then the input value will - be hidden. - :param confirmation_prompt: Prompt a second time to confirm the - value. Can be set to a string instead of ``True`` to customize - the message. - :param type: the type to use to check the value against. - :param value_proc: if this parameter is provided it's a function that - is invoked instead of the type conversion to - convert a value. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - :param show_choices: Show or hide choices if the passed type is a Choice. - For example if type is a Choice of either day or week, - show_choices is true and text is "Group by" then the - prompt will be "Group by (day, week): ". - - .. versionadded:: 8.0 - ``confirmation_prompt`` can be a custom string. - - .. versionadded:: 7.0 - Added the ``show_choices`` parameter. - - .. versionadded:: 6.0 - Added unicode support for cmd.exe on Windows. - - .. versionadded:: 4.0 - Added the `err` parameter. - - """ - - def prompt_func(text: str) -> str: - f = hidden_prompt_func if hide_input else visible_prompt_func - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(text.rstrip(" "), nl=False, err=err) - # Echo a space to stdout to work around an issue where - # readline causes backspace to clear the whole line. - return f(" ") - except (KeyboardInterrupt, EOFError): - # getpass doesn't print a newline if the user aborts input with ^C. - # Allegedly this behavior is inherited from getpass(3). - # A doc bug has been filed at https://bugs.python.org/issue24711 - if hide_input: - echo(None, err=err) - raise Abort() from None - - if value_proc is None: - value_proc = convert_type(type, default) - - prompt = _build_prompt( - text, prompt_suffix, show_default, default, show_choices, type - ) - - if confirmation_prompt: - if confirmation_prompt is True: - confirmation_prompt = _("Repeat for confirmation") - - confirmation_prompt = t.cast(str, confirmation_prompt) - confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) - - while True: - while True: - value = prompt_func(prompt) - if value: - break - elif default is not None: - value = default - break - try: - result = value_proc(value) - except UsageError as e: - if hide_input: - echo(_("Error: The value you entered was invalid."), err=err) - else: - echo(_("Error: {e.message}").format(e=e), err=err) # noqa: B306 - continue - if not confirmation_prompt: - return result - while True: - confirmation_prompt = t.cast(str, confirmation_prompt) - value2 = prompt_func(confirmation_prompt) - if value2: - break - if value == value2: - return result - echo(_("Error: The two entered values do not match."), err=err) - - -def confirm( - text: str, - default: t.Optional[bool] = False, - abort: bool = False, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, -) -> bool: - """Prompts for confirmation (yes/no question). - - If the user aborts the input by sending a interrupt signal this - function will catch it and raise a :exc:`Abort` exception. - - :param text: the question to ask. 
- :param default: The default value to use when no input is given. If - ``None``, repeat until input is given. - :param abort: if this is set to `True` a negative answer aborts the - exception by raising :exc:`Abort`. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - - .. versionchanged:: 8.0 - Repeat until input is given if ``default`` is ``None``. - - .. versionadded:: 4.0 - Added the ``err`` parameter. - """ - prompt = _build_prompt( - text, - prompt_suffix, - show_default, - "y/n" if default is None else ("Y/n" if default else "y/N"), - ) - - while True: - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(prompt.rstrip(" "), nl=False, err=err) - # Echo a space to stdout to work around an issue where - # readline causes backspace to clear the whole line. - value = visible_prompt_func(" ").lower().strip() - except (KeyboardInterrupt, EOFError): - raise Abort() from None - if value in ("y", "yes"): - rv = True - elif value in ("n", "no"): - rv = False - elif default is not None and value == "": - rv = default - else: - echo(_("Error: invalid input"), err=err) - continue - break - if abort and not rv: - raise Abort() - return rv - - -def get_terminal_size() -> os.terminal_size: - """Returns the current size of the terminal as tuple in the form - ``(width, height)`` in columns and rows. - - .. deprecated:: 8.0 - Will be removed in Click 8.1. Use - :func:`shutil.get_terminal_size` instead. - """ - import shutil - import warnings - - warnings.warn( - "'click.get_terminal_size()' is deprecated and will be removed" - " in Click 8.1. Use 'shutil.get_terminal_size()' instead.", - DeprecationWarning, - stacklevel=2, - ) - return shutil.get_terminal_size() - - -def echo_via_pager( - text_or_generator: t.Union[t.Iterable[str], t.Callable[[], t.Iterable[str]], str], - color: t.Optional[bool] = None, -) -> None: - """This function takes a text and shows it via an environment specific - pager on stdout. - - .. versionchanged:: 3.0 - Added the `color` flag. - - :param text_or_generator: the text to page, or alternatively, a - generator emitting the text to page. - :param color: controls if the pager supports ANSI colors or not. The - default is autodetection. 
- """ - color = resolve_color_default(color) - - if inspect.isgeneratorfunction(text_or_generator): - i = t.cast(t.Callable[[], t.Iterable[str]], text_or_generator)() - elif isinstance(text_or_generator, str): - i = [text_or_generator] - else: - i = iter(t.cast(t.Iterable[str], text_or_generator)) - - # convert every element of i to a text type if necessary - text_generator = (el if isinstance(el, str) else str(el) for el in i) - - from ._termui_impl import pager - - return pager(itertools.chain(text_generator, "\n"), color) - - -def progressbar( - iterable: t.Optional[t.Iterable[V]] = None, - length: t.Optional[int] = None, - label: t.Optional[str] = None, - show_eta: bool = True, - show_percent: t.Optional[bool] = None, - show_pos: bool = False, - item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.Optional[t.TextIO] = None, - color: t.Optional[bool] = None, - update_min_steps: int = 1, -) -> "ProgressBar[V]": - """This function creates an iterable context manager that can be used - to iterate over something while showing a progress bar. It will - either iterate over the `iterable` or `length` items (that are counted - up). While iteration happens, this function will print a rendered - progress bar to the given `file` (defaults to stdout) and will attempt - to calculate remaining time and more. By default, this progress bar - will not be rendered if the file is not a terminal. - - The context manager creates the progress bar. When the context - manager is entered the progress bar is already created. With every - iteration over the progress bar, the iterable passed to the bar is - advanced and the bar is updated. When the context manager exits, - a newline is printed and the progress bar is finalized on screen. - - Note: The progress bar is currently designed for use cases where the - total progress can be expected to take at least several seconds. - Because of this, the ProgressBar class object won't display - progress that is considered too fast, and progress where the time - between steps is less than a second. - - No printing must happen or the progress bar will be unintentionally - destroyed. - - Example usage:: - - with progressbar(items) as bar: - for item in bar: - do_something_with(item) - - Alternatively, if no iterable is specified, one can manually update the - progress bar through the `update()` method instead of directly - iterating over the progress bar. The update method accepts the number - of steps to increment the bar with:: - - with progressbar(length=chunks.total_bytes) as bar: - for chunk in chunks: - process_chunk(chunk) - bar.update(chunks.bytes) - - The ``update()`` method also takes an optional value specifying the - ``current_item`` at the new position. This is useful when used - together with ``item_show_func`` to customize the output for each - manual step:: - - with click.progressbar( - length=total_size, - label='Unzipping archive', - item_show_func=lambda a: a.filename - ) as bar: - for archive in zip_file: - archive.extract() - bar.update(archive.size, archive) - - :param iterable: an iterable to iterate over. If not provided the length - is required. - :param length: the number of items to iterate over. By default the - progressbar will attempt to ask the iterator about its - length, which might or might not work. 
If an iterable is - also provided this parameter can be used to override the - length. If an iterable is not provided the progress bar - will iterate over a range of that length. - :param label: the label to show next to the progress bar. - :param show_eta: enables or disables the estimated time display. This is - automatically disabled if the length cannot be - determined. - :param show_percent: enables or disables the percentage display. The - default is `True` if the iterable has a length or - `False` if not. - :param show_pos: enables or disables the absolute position display. The - default is `False`. - :param item_show_func: A function called with the current item which - can return a string to show next to the progress bar. If the - function returns ``None`` nothing is shown. The current item can - be ``None``, such as when entering and exiting the bar. - :param fill_char: the character to use to show the filled part of the - progress bar. - :param empty_char: the character to use to show the non-filled part of - the progress bar. - :param bar_template: the format string to use as template for the bar. - The parameters in it are ``label`` for the label, - ``bar`` for the progress bar and ``info`` for the - info section. - :param info_sep: the separator between multiple info items (eta etc.) - :param width: the width of the progress bar in characters, 0 means full - terminal width - :param file: The file to write to. If this is not a terminal then - only the label is printed. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are included anywhere in the progress bar output - which is not the case by default. - :param update_min_steps: Render only when this many updates have - completed. This allows tuning for very fast iterators. - - .. versionchanged:: 8.0 - Output is shown even if execution time is less than 0.5 seconds. - - .. versionchanged:: 8.0 - ``item_show_func`` shows the current item, not the previous one. - - .. versionchanged:: 8.0 - Labels are echoed if the output is not a TTY. Reverts a change - in 7.0 that removed all output. - - .. versionadded:: 8.0 - Added the ``update_min_steps`` parameter. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. Added the ``update`` method to - the object. - - .. versionadded:: 2.0 - """ - from ._termui_impl import ProgressBar - - color = resolve_color_default(color) - return ProgressBar( - iterable=iterable, - length=length, - show_eta=show_eta, - show_percent=show_percent, - show_pos=show_pos, - item_show_func=item_show_func, - fill_char=fill_char, - empty_char=empty_char, - bar_template=bar_template, - info_sep=info_sep, - file=file, - label=label, - width=width, - color=color, - update_min_steps=update_min_steps, - ) - - -def clear() -> None: - """Clears the terminal screen. This will have the effect of clearing - the whole visible space of the terminal and moving the cursor to the - top left. This does not do anything if not connected to a terminal. - - .. 
versionadded:: 2.0 - """ - if not isatty(sys.stdout): - return - if WIN: - os.system("cls") - else: - sys.stdout.write("\033[2J\033[1;1H") - - -def _interpret_color( - color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0 -) -> str: - if isinstance(color, int): - return f"{38 + offset};5;{color:d}" - - if isinstance(color, (tuple, list)): - r, g, b = color - return f"{38 + offset};2;{r:d};{g:d};{b:d}" - - return str(_ansi_colors[color] + offset) - - -def style( - text: t.Any, - fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, - bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, - bold: t.Optional[bool] = None, - dim: t.Optional[bool] = None, - underline: t.Optional[bool] = None, - overline: t.Optional[bool] = None, - italic: t.Optional[bool] = None, - blink: t.Optional[bool] = None, - reverse: t.Optional[bool] = None, - strikethrough: t.Optional[bool] = None, - reset: bool = True, -) -> str: - """Styles a text with ANSI styles and returns the new string. By - default the styling is self contained which means that at the end - of the string a reset code is issued. This can be prevented by - passing ``reset=False``. - - Examples:: - - click.echo(click.style('Hello World!', fg='green')) - click.echo(click.style('ATTENTION!', blink=True)) - click.echo(click.style('Some things', reverse=True, fg='cyan')) - click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) - - Supported color names: - - * ``black`` (might be a gray) - * ``red`` - * ``green`` - * ``yellow`` (might be an orange) - * ``blue`` - * ``magenta`` - * ``cyan`` - * ``white`` (might be light gray) - * ``bright_black`` - * ``bright_red`` - * ``bright_green`` - * ``bright_yellow`` - * ``bright_blue`` - * ``bright_magenta`` - * ``bright_cyan`` - * ``bright_white`` - * ``reset`` (reset the color code only) - - If the terminal supports it, color may also be specified as: - - - An integer in the interval [0, 255]. The terminal must support - 8-bit/256-color mode. - - An RGB tuple of three integers in [0, 255]. The terminal must - support 24-bit/true-color mode. - - See https://en.wikipedia.org/wiki/ANSI_color and - https://gist.github.com/XVilka/8346728 for more information. - - :param text: the string to style with ansi codes. - :param fg: if provided this will become the foreground color. - :param bg: if provided this will become the background color. - :param bold: if provided this will enable or disable bold mode. - :param dim: if provided this will enable or disable dim mode. This is - badly supported. - :param underline: if provided this will enable or disable underline. - :param overline: if provided this will enable or disable overline. - :param italic: if provided this will enable or disable italic. - :param blink: if provided this will enable or disable blinking. - :param reverse: if provided this will enable or disable inverse - rendering (foreground becomes background and the - other way round). - :param strikethrough: if provided this will enable or disable - striking through text. - :param reset: by default a reset-all code is added at the end of the - string which means that styles do not carry over. This - can be disabled to compose styles. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. - - .. versionchanged:: 8.0 - Added support for 256 and RGB color codes. - - .. versionchanged:: 8.0 - Added the ``strikethrough``, ``italic``, and ``overline`` - parameters. - - .. versionchanged:: 7.0 - Added support for bright colors. - - .. 
versionadded:: 2.0 - """ - if not isinstance(text, str): - text = str(text) - - bits = [] - - if fg: - try: - bits.append(f"\033[{_interpret_color(fg)}m") - except KeyError: - raise TypeError(f"Unknown color {fg!r}") from None - - if bg: - try: - bits.append(f"\033[{_interpret_color(bg, 10)}m") - except KeyError: - raise TypeError(f"Unknown color {bg!r}") from None - - if bold is not None: - bits.append(f"\033[{1 if bold else 22}m") - if dim is not None: - bits.append(f"\033[{2 if dim else 22}m") - if underline is not None: - bits.append(f"\033[{4 if underline else 24}m") - if overline is not None: - bits.append(f"\033[{53 if overline else 55}m") - if italic is not None: - bits.append(f"\033[{3 if italic else 23}m") - if blink is not None: - bits.append(f"\033[{5 if blink else 25}m") - if reverse is not None: - bits.append(f"\033[{7 if reverse else 27}m") - if strikethrough is not None: - bits.append(f"\033[{9 if strikethrough else 29}m") - bits.append(text) - if reset: - bits.append(_ansi_reset_all) - return "".join(bits) - - -def unstyle(text: str) -> str: - """Removes ANSI styling information from a string. Usually it's not - necessary to use this function as Click's echo function will - automatically remove styling if necessary. - - .. versionadded:: 2.0 - - :param text: the text to remove style information from. - """ - return strip_ansi(text) - - -def secho( - message: t.Optional[t.Any] = None, - file: t.Optional[t.IO] = None, - nl: bool = True, - err: bool = False, - color: t.Optional[bool] = None, - **styles: t.Any, -) -> None: - """This function combines :func:`echo` and :func:`style` into one - call. As such the following two calls are the same:: - - click.secho('Hello World!', fg='green') - click.echo(click.style('Hello World!', fg='green')) - - All keyword arguments are forwarded to the underlying functions - depending on which one they go with. - - Non-string types will be converted to :class:`str`. However, - :class:`bytes` are passed directly to :meth:`echo` without applying - style. If you want to style bytes that represent text, call - :meth:`bytes.decode` first. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. Bytes are - passed through without style applied. - - .. versionadded:: 2.0 - """ - if message is not None and not isinstance(message, (bytes, bytearray)): - message = style(message, **styles) - - return echo(message, file=file, nl=nl, err=err, color=color) - - -def edit( - text: t.Optional[t.AnyStr] = None, - editor: t.Optional[str] = None, - env: t.Optional[t.Mapping[str, str]] = None, - require_save: bool = True, - extension: str = ".txt", - filename: t.Optional[str] = None, -) -> t.Optional[t.AnyStr]: - r"""Edits the given text in the defined editor. If an editor is given - (should be the full path to the executable but the regular operating - system search path is used for finding the executable) it overrides - the detected editor. Optionally, some environment variables can be - used. If the editor is closed without changes, `None` is returned. In - case a file is edited directly the return value is always `None` and - `require_save` and `extension` are ignored. - - If the editor cannot be opened a :exc:`UsageError` is raised. - - Note for Windows: to simplify cross-platform usage, the newlines are - automatically converted from POSIX to Windows and vice versa. As such, - the message here will have ``\n`` as newline markers. - - :param text: the text to edit. - :param editor: optionally the editor to use. 
Defaults to automatic - detection. - :param env: environment variables to forward to the editor. - :param require_save: if this is true, then not saving in the editor - will make the return value become `None`. - :param extension: the extension to tell the editor about. This defaults - to `.txt` but changing this might change syntax - highlighting. - :param filename: if provided it will edit this file instead of the - provided text contents. It will not use a temporary - file as an indirection in that case. - """ - from ._termui_impl import Editor - - ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) - - if filename is None: - return ed.edit(text) - - ed.edit_file(filename) - return None - - -def launch(url: str, wait: bool = False, locate: bool = False) -> int: - """This function launches the given URL (or filename) in the default - viewer application for this file type. If this is an executable, it - might launch the executable in a new session. The return value is - the exit code of the launched application. Usually, ``0`` indicates - success. - - Examples:: - - click.launch('https://click.palletsprojects.com/') - click.launch('/my/downloaded/file', locate=True) - - .. versionadded:: 2.0 - - :param url: URL or filename of the thing to launch. - :param wait: Wait for the program to exit before returning. This - only works if the launched program blocks. In particular, - ``xdg-open`` on Linux does not block. - :param locate: if this is set to `True` then instead of launching the - application associated with the URL it will attempt to - launch a file manager with the file located. This - might have weird effects if the URL does not point to - the filesystem. - """ - from ._termui_impl import open_url - - return open_url(url, wait=wait, locate=locate) - - -# If this is provided, getchar() calls into this instead. This is used -# for unittesting purposes. -_getchar: t.Optional[t.Callable[[bool], str]] = None - - -def getchar(echo: bool = False) -> str: - """Fetches a single character from the terminal and returns it. This - will always return a unicode character and under certain rare - circumstances this might return more than one character. The - situations which more than one character is returned is when for - whatever reason multiple characters end up in the terminal buffer or - standard input was not actually a terminal. - - Note that this will always read from the terminal, even if something - is piped into the standard input. - - Note for Windows: in rare cases when typing non-ASCII characters, this - function might wait for a second character and then return both at once. - This is because certain Unicode characters look like special-key markers. - - .. versionadded:: 2.0 - - :param echo: if set to `True`, the character read will also show up on - the terminal. The default is to not show it. - """ - global _getchar - - if _getchar is None: - from ._termui_impl import getchar as f - - _getchar = f - - return _getchar(echo) - - -def raw_terminal() -> t.ContextManager[int]: - from ._termui_impl import raw_terminal as f - - return f() - - -def pause(info: t.Optional[str] = None, err: bool = False) -> None: - """This command stops execution and waits for the user to press any - key to continue. This is similar to the Windows batch "pause" - command. If the program is not run through a terminal, this command - will instead do nothing. - - .. versionadded:: 2.0 - - .. versionadded:: 4.0 - Added the `err` parameter. 
- - :param info: The message to print before pausing. Defaults to - ``"Press any key to continue..."``. - :param err: if set to message goes to ``stderr`` instead of - ``stdout``, the same as with echo. - """ - if not isatty(sys.stdin) or not isatty(sys.stdout): - return - - if info is None: - info = _("Press any key to continue...") - - try: - if info: - echo(info, nl=False, err=err) - try: - getchar() - except (KeyboardInterrupt, EOFError): - pass - finally: - if info: - echo(err=err) diff --git a/venv/lib/python3.8/site-packages/click/testing.py b/venv/lib/python3.8/site-packages/click/testing.py deleted file mode 100644 index d19b850..0000000 --- a/venv/lib/python3.8/site-packages/click/testing.py +++ /dev/null @@ -1,479 +0,0 @@ -import contextlib -import io -import os -import shlex -import shutil -import sys -import tempfile -import typing as t -from types import TracebackType - -from . import formatting -from . import termui -from . import utils -from ._compat import _find_binary_reader - -if t.TYPE_CHECKING: - from .core import BaseCommand - - -class EchoingStdin: - def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: - self._input = input - self._output = output - self._paused = False - - def __getattr__(self, x: str) -> t.Any: - return getattr(self._input, x) - - def _echo(self, rv: bytes) -> bytes: - if not self._paused: - self._output.write(rv) - - return rv - - def read(self, n: int = -1) -> bytes: - return self._echo(self._input.read(n)) - - def read1(self, n: int = -1) -> bytes: - return self._echo(self._input.read1(n)) # type: ignore - - def readline(self, n: int = -1) -> bytes: - return self._echo(self._input.readline(n)) - - def readlines(self) -> t.List[bytes]: - return [self._echo(x) for x in self._input.readlines()] - - def __iter__(self) -> t.Iterator[bytes]: - return iter(self._echo(x) for x in self._input) - - def __repr__(self) -> str: - return repr(self._input) - - -@contextlib.contextmanager -def _pause_echo(stream: t.Optional[EchoingStdin]) -> t.Iterator[None]: - if stream is None: - yield - else: - stream._paused = True - yield - stream._paused = False - - -class _NamedTextIOWrapper(io.TextIOWrapper): - def __init__( - self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any - ) -> None: - super().__init__(buffer, **kwargs) - self._name = name - self._mode = mode - - @property - def name(self) -> str: - return self._name - - @property - def mode(self) -> str: - return self._mode - - -def make_input_stream( - input: t.Optional[t.Union[str, bytes, t.IO]], charset: str -) -> t.BinaryIO: - # Is already an input stream. - if hasattr(input, "read"): - rv = _find_binary_reader(t.cast(t.IO, input)) - - if rv is not None: - return rv - - raise TypeError("Could not find binary reader for input stream.") - - if input is None: - input = b"" - elif isinstance(input, str): - input = input.encode(charset) - - return io.BytesIO(t.cast(bytes, input)) - - -class Result: - """Holds the captured result of an invoked CLI script.""" - - def __init__( - self, - runner: "CliRunner", - stdout_bytes: bytes, - stderr_bytes: t.Optional[bytes], - return_value: t.Any, - exit_code: int, - exception: t.Optional[BaseException], - exc_info: t.Optional[ - t.Tuple[t.Type[BaseException], BaseException, TracebackType] - ] = None, - ): - #: The runner that created the result - self.runner = runner - #: The standard output as bytes. 
- self.stdout_bytes = stdout_bytes - #: The standard error as bytes, or None if not available - self.stderr_bytes = stderr_bytes - #: The value returned from the invoked command. - #: - #: .. versionadded:: 8.0 - self.return_value = return_value - #: The exit code as integer. - self.exit_code = exit_code - #: The exception that happened if one did. - self.exception = exception - #: The traceback - self.exc_info = exc_info - - @property - def output(self) -> str: - """The (standard) output as unicode string.""" - return self.stdout - - @property - def stdout(self) -> str: - """The standard output as unicode string.""" - return self.stdout_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - @property - def stderr(self) -> str: - """The standard error as unicode string.""" - if self.stderr_bytes is None: - raise ValueError("stderr not separately captured") - return self.stderr_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - def __repr__(self) -> str: - exc_str = repr(self.exception) if self.exception else "okay" - return f"<{type(self).__name__} {exc_str}>" - - -class CliRunner: - """The CLI runner provides functionality to invoke a Click command line - script for unittesting purposes in a isolated environment. This only - works in single-threaded systems without any concurrency as it changes the - global interpreter state. - - :param charset: the character set for the input and output data. - :param env: a dictionary with environment variables for overriding. - :param echo_stdin: if this is set to `True`, then reading from stdin writes - to stdout. This is useful for showing examples in - some circumstances. Note that regular prompts - will automatically echo the input. - :param mix_stderr: if this is set to `False`, then stdout and stderr are - preserved as independent streams. This is useful for - Unix-philosophy apps that have predictable stdout and - noisy stderr, such that each may be measured - independently - """ - - def __init__( - self, - charset: str = "utf-8", - env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, - echo_stdin: bool = False, - mix_stderr: bool = True, - ) -> None: - self.charset = charset - self.env = env or {} - self.echo_stdin = echo_stdin - self.mix_stderr = mix_stderr - - def get_default_prog_name(self, cli: "BaseCommand") -> str: - """Given a command object it will return the default program name - for it. The default is the `name` attribute or ``"root"`` if not - set. - """ - return cli.name or "root" - - def make_env( - self, overrides: t.Optional[t.Mapping[str, t.Optional[str]]] = None - ) -> t.Mapping[str, t.Optional[str]]: - """Returns the environment overrides for invoking a script.""" - rv = dict(self.env) - if overrides: - rv.update(overrides) - return rv - - @contextlib.contextmanager - def isolation( - self, - input: t.Optional[t.Union[str, bytes, t.IO]] = None, - env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, - color: bool = False, - ) -> t.Iterator[t.Tuple[io.BytesIO, t.Optional[io.BytesIO]]]: - """A context manager that sets up the isolation for invoking of a - command line tool. This sets up stdin with the given input data - and `os.environ` with the overrides from the given dictionary. - This also rebinds some internals in Click to be mocked (like the - prompt functionality). - - This is automatically done in the :meth:`invoke` method. - - :param input: the input stream to put into sys.stdin. - :param env: the environment overrides as dictionary. 
-        :param color: whether the output should contain color codes. The
-                      application can still override this explicitly.
-
-        .. versionchanged:: 8.0
-            ``stderr`` is opened with ``errors="backslashreplace"``
-            instead of the default ``"strict"``.
-
-        .. versionchanged:: 4.0
-            Added the ``color`` parameter.
-        """
-        bytes_input = make_input_stream(input, self.charset)
-        echo_input = None
-
-        old_stdin = sys.stdin
-        old_stdout = sys.stdout
-        old_stderr = sys.stderr
-        old_forced_width = formatting.FORCED_WIDTH
-        formatting.FORCED_WIDTH = 80
-
-        env = self.make_env(env)
-
-        bytes_output = io.BytesIO()
-
-        if self.echo_stdin:
-            bytes_input = echo_input = t.cast(
-                t.BinaryIO, EchoingStdin(bytes_input, bytes_output)
-            )
-
-        sys.stdin = text_input = _NamedTextIOWrapper(
-            bytes_input, encoding=self.charset, name="<stdin>", mode="r"
-        )
-
-        if self.echo_stdin:
-            # Force unbuffered reads, otherwise TextIOWrapper reads a
-            # large chunk which is echoed early.
-            text_input._CHUNK_SIZE = 1  # type: ignore
-
-        sys.stdout = _NamedTextIOWrapper(
-            bytes_output, encoding=self.charset, name="<stdout>", mode="w"
-        )
-
-        bytes_error = None
-        if self.mix_stderr:
-            sys.stderr = sys.stdout
-        else:
-            bytes_error = io.BytesIO()
-            sys.stderr = _NamedTextIOWrapper(
-                bytes_error,
-                encoding=self.charset,
-                name="<stderr>",
-                mode="w",
-                errors="backslashreplace",
-            )
-
-        @_pause_echo(echo_input)  # type: ignore
-        def visible_input(prompt: t.Optional[str] = None) -> str:
-            sys.stdout.write(prompt or "")
-            val = text_input.readline().rstrip("\r\n")
-            sys.stdout.write(f"{val}\n")
-            sys.stdout.flush()
-            return val
-
-        @_pause_echo(echo_input)  # type: ignore
-        def hidden_input(prompt: t.Optional[str] = None) -> str:
-            sys.stdout.write(f"{prompt or ''}\n")
-            sys.stdout.flush()
-            return text_input.readline().rstrip("\r\n")
-
-        @_pause_echo(echo_input)  # type: ignore
-        def _getchar(echo: bool) -> str:
-            char = sys.stdin.read(1)
-
-            if echo:
-                sys.stdout.write(char)
-
-            sys.stdout.flush()
-            return char
-
-        default_color = color
-
-        def should_strip_ansi(
-            stream: t.Optional[t.IO] = None, color: t.Optional[bool] = None
-        ) -> bool:
-            if color is None:
-                return not default_color
-            return not color
-
-        old_visible_prompt_func = termui.visible_prompt_func
-        old_hidden_prompt_func = termui.hidden_prompt_func
-        old__getchar_func = termui._getchar
-        old_should_strip_ansi = utils.should_strip_ansi  # type: ignore
-        termui.visible_prompt_func = visible_input
-        termui.hidden_prompt_func = hidden_input
-        termui._getchar = _getchar
-        utils.should_strip_ansi = should_strip_ansi  # type: ignore
-
-        old_env = {}
-        try:
-            for key, value in env.items():
-                old_env[key] = os.environ.get(key)
-                if value is None:
-                    try:
-                        del os.environ[key]
-                    except Exception:
-                        pass
-                else:
-                    os.environ[key] = value
-            yield (bytes_output, bytes_error)
-        finally:
-            for key, value in old_env.items():
-                if value is None:
-                    try:
-                        del os.environ[key]
-                    except Exception:
-                        pass
-                else:
-                    os.environ[key] = value
-            sys.stdout = old_stdout
-            sys.stderr = old_stderr
-            sys.stdin = old_stdin
-            termui.visible_prompt_func = old_visible_prompt_func
-            termui.hidden_prompt_func = old_hidden_prompt_func
-            termui._getchar = old__getchar_func
-            utils.should_strip_ansi = old_should_strip_ansi  # type: ignore
-            formatting.FORCED_WIDTH = old_forced_width
-
-    def invoke(
-        self,
-        cli: "BaseCommand",
-        args: t.Optional[t.Union[str, t.Sequence[str]]] = None,
-        input: t.Optional[t.Union[str, bytes, t.IO]] = None,
-        env: t.Optional[t.Mapping[str, t.Optional[str]]] = None,
-        catch_exceptions: bool = True,
-        color:
bool = False, - **extra: t.Any, - ) -> Result: - """Invokes a command in an isolated environment. The arguments are - forwarded directly to the command line script, the `extra` keyword - arguments are passed to the :meth:`~clickpkg.Command.main` function of - the command. - - This returns a :class:`Result` object. - - :param cli: the command to invoke - :param args: the arguments to invoke. It may be given as an iterable - or a string. When given as string it will be interpreted - as a Unix shell command. More details at - :func:`shlex.split`. - :param input: the input data for `sys.stdin`. - :param env: the environment overrides. - :param catch_exceptions: Whether to catch any other exceptions than - ``SystemExit``. - :param extra: the keyword arguments to pass to :meth:`main`. - :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionchanged:: 8.0 - The result object has the ``return_value`` attribute with - the value returned from the invoked command. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionchanged:: 3.0 - Added the ``catch_exceptions`` parameter. - - .. versionchanged:: 3.0 - The result object has the ``exc_info`` attribute with the - traceback if available. - """ - exc_info = None - with self.isolation(input=input, env=env, color=color) as outstreams: - return_value = None - exception: t.Optional[BaseException] = None - exit_code = 0 - - if isinstance(args, str): - args = shlex.split(args) - - try: - prog_name = extra.pop("prog_name") - except KeyError: - prog_name = self.get_default_prog_name(cli) - - try: - return_value = cli.main(args=args or (), prog_name=prog_name, **extra) - except SystemExit as e: - exc_info = sys.exc_info() - e_code = t.cast(t.Optional[t.Union[int, t.Any]], e.code) - - if e_code is None: - e_code = 0 - - if e_code != 0: - exception = e - - if not isinstance(e_code, int): - sys.stdout.write(str(e_code)) - sys.stdout.write("\n") - e_code = 1 - - exit_code = e_code - - except Exception as e: - if not catch_exceptions: - raise - exception = e - exit_code = 1 - exc_info = sys.exc_info() - finally: - sys.stdout.flush() - stdout = outstreams[0].getvalue() - if self.mix_stderr: - stderr = None - else: - stderr = outstreams[1].getvalue() # type: ignore - - return Result( - runner=self, - stdout_bytes=stdout, - stderr_bytes=stderr, - return_value=return_value, - exit_code=exit_code, - exception=exception, - exc_info=exc_info, # type: ignore - ) - - @contextlib.contextmanager - def isolated_filesystem( - self, temp_dir: t.Optional[t.Union[str, os.PathLike]] = None - ) -> t.Iterator[str]: - """A context manager that creates a temporary directory and - changes the current working directory to it. This isolates tests - that affect the contents of the CWD to prevent them from - interfering with each other. - - :param temp_dir: Create the temporary directory under this - directory. If given, the created directory is not removed - when exiting. - - .. versionchanged:: 8.0 - Added the ``temp_dir`` parameter. 
- """ - cwd = os.getcwd() - t = tempfile.mkdtemp(dir=temp_dir) - os.chdir(t) - - try: - yield t - finally: - os.chdir(cwd) - - if temp_dir is None: - try: - shutil.rmtree(t) - except OSError: # noqa: B014 - pass diff --git a/venv/lib/python3.8/site-packages/click/types.py b/venv/lib/python3.8/site-packages/click/types.py deleted file mode 100644 index 103d218..0000000 --- a/venv/lib/python3.8/site-packages/click/types.py +++ /dev/null @@ -1,1052 +0,0 @@ -import os -import stat -import typing as t -from datetime import datetime -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import _get_argv_encoding -from ._compat import get_filesystem_encoding -from ._compat import open_stream -from .exceptions import BadParameter -from .utils import LazyFile -from .utils import safecall - -if t.TYPE_CHECKING: - import typing_extensions as te - from .core import Context - from .core import Parameter - from .shell_completion import CompletionItem - - -class ParamType: - """Represents the type of a parameter. Validates and converts values - from the command line or Python into the correct type. - - To implement a custom type, subclass and implement at least the - following: - - - The :attr:`name` class attribute must be set. - - Calling an instance of the type with ``None`` must return - ``None``. This is already implemented by default. - - :meth:`convert` must convert string values to the correct type. - - :meth:`convert` must accept values that are already the correct - type. - - It must be able to convert a value if the ``ctx`` and ``param`` - arguments are ``None``. This can occur when converting prompt - input. - """ - - is_composite: t.ClassVar[bool] = False - arity: t.ClassVar[int] = 1 - - #: the descriptive name of this type - name: str - - #: if a list of this type is expected and the value is pulled from a - #: string environment variable, this is what splits it up. `None` - #: means any whitespace. For all parameters the general rule is that - #: whitespace splits them up. The exception are paths and files which - #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on - #: Windows). - envvar_list_splitter: t.ClassVar[t.Optional[str]] = None - - def to_info_dict(self) -> t.Dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. versionadded:: 8.0 - """ - # The class name without the "ParamType" suffix. - param_type = type(self).__name__.partition("ParamType")[0] - param_type = param_type.partition("ParameterType")[0] - return {"param_type": param_type, "name": self.name} - - def __call__( - self, - value: t.Any, - param: t.Optional["Parameter"] = None, - ctx: t.Optional["Context"] = None, - ) -> t.Any: - if value is not None: - return self.convert(value, param, ctx) - - def get_metavar(self, param: "Parameter") -> t.Optional[str]: - """Returns the metavar default for this param if it provides one.""" - - def get_missing_message(self, param: "Parameter") -> t.Optional[str]: - """Optionally might return extra information about a missing - parameter. - - .. versionadded:: 2.0 - """ - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - """Convert the value to the correct type. This is not called if - the value is ``None`` (the missing value). 
- - This must accept string values from the command line, as well as - values that are already the correct type. It may also convert - other compatible types. - - The ``param`` and ``ctx`` arguments may be ``None`` in certain - situations, such as when converting prompt input. - - If the value cannot be converted, call :meth:`fail` with a - descriptive message. - - :param value: The value to convert. - :param param: The parameter that is using this type to convert - its value. May be ``None``. - :param ctx: The current context that arrived at this value. May - be ``None``. - """ - return value - - def split_envvar_value(self, rv: str) -> t.Sequence[str]: - """Given a value from an environment variable this splits it up - into small chunks depending on the defined envvar list splitter. - - If the splitter is set to `None`, which means that whitespace splits, - then leading and trailing whitespace is ignored. Otherwise, leading - and trailing splitters usually lead to empty items being included. - """ - return (rv or "").split(self.envvar_list_splitter) - - def fail( - self, - message: str, - param: t.Optional["Parameter"] = None, - ctx: t.Optional["Context"] = None, - ) -> "t.NoReturn": - """Helper method to fail with an invalid value message.""" - raise BadParameter(message, ctx=ctx, param=param) - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Return a list of - :class:`~click.shell_completion.CompletionItem` objects for the - incomplete value. Most types do not provide completions, but - some do, and this allows custom types to provide custom - completions as well. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. 
versionadded:: 8.0 - """ - return [] - - -class CompositeParamType(ParamType): - is_composite = True - - @property - def arity(self) -> int: # type: ignore - raise NotImplementedError() - - -class FuncParamType(ParamType): - def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: - self.name = func.__name__ - self.func = func - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["func"] = self.func - return info_dict - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - try: - return self.func(value) - except ValueError: - try: - value = str(value) - except UnicodeError: - value = value.decode("utf-8", "replace") - - self.fail(value, param, ctx) - - -class UnprocessedParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - return value - - def __repr__(self) -> str: - return "UNPROCESSED" - - -class StringParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - if isinstance(value, bytes): - enc = _get_argv_encoding() - try: - value = value.decode(enc) - except UnicodeError: - fs_enc = get_filesystem_encoding() - if fs_enc != enc: - try: - value = value.decode(fs_enc) - except UnicodeError: - value = value.decode("utf-8", "replace") - else: - value = value.decode("utf-8", "replace") - return value - return str(value) - - def __repr__(self) -> str: - return "STRING" - - -class Choice(ParamType): - """The choice type allows a value to be checked against a fixed set - of supported values. All of these values have to be strings. - - You should only pass a list or tuple of choices. Other iterables - (like generators) may lead to surprising results. - - The resulting value will always be one of the originally passed choices - regardless of ``case_sensitive`` or any ``ctx.token_normalize_func`` - being specified. - - See :ref:`choice-opts` for an example. - - :param case_sensitive: Set to false to make choices case - insensitive. Defaults to true. - """ - - name = "choice" - - def __init__(self, choices: t.Sequence[str], case_sensitive: bool = True) -> None: - self.choices = choices - self.case_sensitive = case_sensitive - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["choices"] = self.choices - info_dict["case_sensitive"] = self.case_sensitive - return info_dict - - def get_metavar(self, param: "Parameter") -> str: - choices_str = "|".join(self.choices) - - # Use curly braces to indicate a required argument. - if param.required and param.param_type_name == "argument": - return f"{{{choices_str}}}" - - # Use square braces to indicate an option or optional argument. 
- return f"[{choices_str}]" - - def get_missing_message(self, param: "Parameter") -> str: - return _("Choose from:\n\t{choices}").format(choices=",\n\t".join(self.choices)) - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - # Match through normalization and case sensitivity - # first do token_normalize_func, then lowercase - # preserve original `value` to produce an accurate message in - # `self.fail` - normed_value = value - normed_choices = {choice: choice for choice in self.choices} - - if ctx is not None and ctx.token_normalize_func is not None: - normed_value = ctx.token_normalize_func(value) - normed_choices = { - ctx.token_normalize_func(normed_choice): original - for normed_choice, original in normed_choices.items() - } - - if not self.case_sensitive: - normed_value = normed_value.casefold() - normed_choices = { - normed_choice.casefold(): original - for normed_choice, original in normed_choices.items() - } - - if normed_value in normed_choices: - return normed_choices[normed_value] - - choices_str = ", ".join(map(repr, self.choices)) - self.fail( - ngettext( - "{value!r} is not {choice}.", - "{value!r} is not one of {choices}.", - len(self.choices), - ).format(value=value, choice=choices_str, choices=choices_str), - param, - ctx, - ) - - def __repr__(self) -> str: - return f"Choice({list(self.choices)})" - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Complete choices that start with the incomplete value. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - str_choices = map(str, self.choices) - - if self.case_sensitive: - matched = (c for c in str_choices if c.startswith(incomplete)) - else: - incomplete = incomplete.lower() - matched = (c for c in str_choices if c.lower().startswith(incomplete)) - - return [CompletionItem(c) for c in matched] - - -class DateTime(ParamType): - """The DateTime type converts date strings into `datetime` objects. - - The format strings which are checked are configurable, but default to some - common (non-timezone aware) ISO 8601 formats. - - When specifying *DateTime* formats, you should only pass a list or a tuple. - Other iterables, like generators, may lead to surprising results. - - The format strings are processed using ``datetime.strptime``, and this - consequently defines the format strings which are allowed. - - Parsing is tried using each format, in order, and the first format which - parses successfully is used. - - :param formats: A list or tuple of date format strings, in the order in - which they should be tried. Defaults to - ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, - ``'%Y-%m-%d %H:%M:%S'``. 
- """ - - name = "datetime" - - def __init__(self, formats: t.Optional[t.Sequence[str]] = None): - self.formats = formats or ["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"] - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["formats"] = self.formats - return info_dict - - def get_metavar(self, param: "Parameter") -> str: - return f"[{'|'.join(self.formats)}]" - - def _try_to_convert_date(self, value: t.Any, format: str) -> t.Optional[datetime]: - try: - return datetime.strptime(value, format) - except ValueError: - return None - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - if isinstance(value, datetime): - return value - - for format in self.formats: - converted = self._try_to_convert_date(value, format) - - if converted is not None: - return converted - - formats_str = ", ".join(map(repr, self.formats)) - self.fail( - ngettext( - "{value!r} does not match the format {format}.", - "{value!r} does not match the formats {formats}.", - len(self.formats), - ).format(value=value, format=formats_str, formats=formats_str), - param, - ctx, - ) - - def __repr__(self) -> str: - return "DateTime" - - -class _NumberParamTypeBase(ParamType): - _number_class: t.ClassVar[t.Type] - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - try: - return self._number_class(value) - except ValueError: - self.fail( - _("{value!r} is not a valid {number_type}.").format( - value=value, number_type=self.name - ), - param, - ctx, - ) - - -class _NumberRangeBase(_NumberParamTypeBase): - def __init__( - self, - min: t.Optional[float] = None, - max: t.Optional[float] = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - self.min = min - self.max = max - self.min_open = min_open - self.max_open = max_open - self.clamp = clamp - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - min=self.min, - max=self.max, - min_open=self.min_open, - max_open=self.max_open, - clamp=self.clamp, - ) - return info_dict - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - import operator - - rv = super().convert(value, param, ctx) - lt_min: bool = self.min is not None and ( - operator.le if self.min_open else operator.lt - )(rv, self.min) - gt_max: bool = self.max is not None and ( - operator.ge if self.max_open else operator.gt - )(rv, self.max) - - if self.clamp: - if lt_min: - return self._clamp(self.min, 1, self.min_open) # type: ignore - - if gt_max: - return self._clamp(self.max, -1, self.max_open) # type: ignore - - if lt_min or gt_max: - self.fail( - _("{value} is not in the range {range}.").format( - value=rv, range=self._describe_range() - ), - param, - ctx, - ) - - return rv - - def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: - """Find the valid value to clamp to bound in the given - direction. - - :param bound: The boundary value. - :param dir: 1 or -1 indicating the direction to move. - :param open: If true, the range does not include the bound. 
- """ - raise NotImplementedError - - def _describe_range(self) -> str: - """Describe the range for use in help text.""" - if self.min is None: - op = "<" if self.max_open else "<=" - return f"x{op}{self.max}" - - if self.max is None: - op = ">" if self.min_open else ">=" - return f"x{op}{self.min}" - - lop = "<" if self.min_open else "<=" - rop = "<" if self.max_open else "<=" - return f"{self.min}{lop}x{rop}{self.max}" - - def __repr__(self) -> str: - clamp = " clamped" if self.clamp else "" - return f"<{type(self).__name__} {self._describe_range()}{clamp}>" - - -class IntParamType(_NumberParamTypeBase): - name = "integer" - _number_class = int - - def __repr__(self) -> str: - return "INT" - - -class IntRange(_NumberRangeBase, IntParamType): - """Restrict an :data:`click.INT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "integer range" - - def _clamp( # type: ignore - self, bound: int, dir: "te.Literal[1, -1]", open: bool - ) -> int: - if not open: - return bound - - return bound + dir - - -class FloatParamType(_NumberParamTypeBase): - name = "float" - _number_class = float - - def __repr__(self) -> str: - return "FLOAT" - - -class FloatRange(_NumberRangeBase, FloatParamType): - """Restrict a :data:`click.FLOAT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. This is not supported if either - boundary is marked ``open``. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "float range" - - def __init__( - self, - min: t.Optional[float] = None, - max: t.Optional[float] = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - super().__init__( - min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp - ) - - if (min_open or max_open) and clamp: - raise TypeError("Clamping is not supported for open bounds.") - - def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: - if not open: - return bound - - # Could use Python 3.9's math.nextafter here, but clamping an - # open float range doesn't seem to be particularly useful. It's - # left up to the user to write a callback to do it if needed. 
- raise RuntimeError("Clamping is not supported for open bounds.") - - -class BoolParamType(ParamType): - name = "boolean" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - if value in {False, True}: - return bool(value) - - norm = value.strip().lower() - - if norm in {"1", "true", "t", "yes", "y", "on"}: - return True - - if norm in {"0", "false", "f", "no", "n", "off"}: - return False - - self.fail( - _("{value!r} is not a valid boolean.").format(value=value), param, ctx - ) - - def __repr__(self) -> str: - return "BOOL" - - -class UUIDParameterType(ParamType): - name = "uuid" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - import uuid - - if isinstance(value, uuid.UUID): - return value - - value = value.strip() - - try: - return uuid.UUID(value) - except ValueError: - self.fail( - _("{value!r} is not a valid UUID.").format(value=value), param, ctx - ) - - def __repr__(self) -> str: - return "UUID" - - -class File(ParamType): - """Declares a parameter to be a file for reading or writing. The file - is automatically closed once the context tears down (after the command - finished working). - - Files can be opened for reading or writing. The special value ``-`` - indicates stdin or stdout depending on the mode. - - By default, the file is opened for reading text data, but it can also be - opened in binary mode or for writing. The encoding parameter can be used - to force a specific encoding. - - The `lazy` flag controls if the file should be opened immediately or upon - first IO. The default is to be non-lazy for standard input and output - streams as well as files opened for reading, `lazy` otherwise. When opening a - file lazily for reading, it is still opened temporarily for validation, but - will not be held open until first IO. lazy is mainly useful when opening - for writing to avoid creating the file until it is needed. - - Starting with Click 2.0, files can also be opened atomically in which - case all writes go into a separate file in the same folder and upon - completion the file will be moved over to the original location. This - is useful if a file regularly read by other users is modified. - - See :ref:`file-args` for more information. 
- """ - - name = "filename" - envvar_list_splitter = os.path.pathsep - - def __init__( - self, - mode: str = "r", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", - lazy: t.Optional[bool] = None, - atomic: bool = False, - ) -> None: - self.mode = mode - self.encoding = encoding - self.errors = errors - self.lazy = lazy - self.atomic = atomic - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update(mode=self.mode, encoding=self.encoding) - return info_dict - - def resolve_lazy_flag(self, value: t.Any) -> bool: - if self.lazy is not None: - return self.lazy - if value == "-": - return False - elif "w" in self.mode: - return True - return False - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - try: - if hasattr(value, "read") or hasattr(value, "write"): - return value - - lazy = self.resolve_lazy_flag(value) - - if lazy: - f: t.IO = t.cast( - t.IO, - LazyFile( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ), - ) - - if ctx is not None: - ctx.call_on_close(f.close_intelligently) # type: ignore - - return f - - f, should_close = open_stream( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - - # If a context is provided, we automatically close the file - # at the end of the context execution (or flush out). If a - # context does not exist, it's the caller's responsibility to - # properly close the file. This for instance happens when the - # type is used with prompts. - if ctx is not None: - if should_close: - ctx.call_on_close(safecall(f.close)) - else: - ctx.call_on_close(safecall(f.flush)) - - return f - except OSError as e: # noqa: B014 - self.fail(f"{os.fsdecode(value)!r}: {e.strerror}", param, ctx) - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Return a special completion marker that tells the completion - system to use the shell to provide file path completions. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - return [CompletionItem(incomplete, type="file")] - - -class Path(ParamType): - """The path type is similar to the :class:`File` type but it performs - different checks. First of all, instead of returning an open file - handle it returns just the filename. Secondly, it can perform various - basic checks about what the file or directory should be. - - :param exists: if set to true, the file or directory needs to exist for - this value to be valid. If this is not required and a - file does indeed not exist, then all further checks are - silently skipped. - :param file_okay: controls if a file is a possible value. - :param dir_okay: controls if a directory is a possible value. - :param writable: if true, a writable check is performed. - :param readable: if true, a readable check is performed. - :param resolve_path: if this is true, then the path is fully resolved - before the value is passed onwards. This means - that it's absolute and symlinks are resolved. It - will not expand a tilde-prefix, as this is - supposed to be done by the shell only. - :param allow_dash: If this is set to `True`, a single dash to indicate - standard streams is permitted. - :param path_type: Convert the incoming path value to this type. 
If - ``None``, keep Python's default, which is ``str``. Useful to - convert to :class:`pathlib.Path`. - - .. versionchanged:: 8.0 - Allow passing ``type=pathlib.Path``. - - .. versionchanged:: 6.0 - Added the ``allow_dash`` parameter. - """ - - envvar_list_splitter = os.path.pathsep - - def __init__( - self, - exists: bool = False, - file_okay: bool = True, - dir_okay: bool = True, - writable: bool = False, - readable: bool = True, - resolve_path: bool = False, - allow_dash: bool = False, - path_type: t.Optional[t.Type] = None, - ): - self.exists = exists - self.file_okay = file_okay - self.dir_okay = dir_okay - self.writable = writable - self.readable = readable - self.resolve_path = resolve_path - self.allow_dash = allow_dash - self.type = path_type - - if self.file_okay and not self.dir_okay: - self.name = _("file") - elif self.dir_okay and not self.file_okay: - self.name = _("directory") - else: - self.name = _("path") - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - exists=self.exists, - file_okay=self.file_okay, - dir_okay=self.dir_okay, - writable=self.writable, - readable=self.readable, - allow_dash=self.allow_dash, - ) - return info_dict - - def coerce_path_result(self, rv: t.Any) -> t.Any: - if self.type is not None and not isinstance(rv, self.type): - if self.type is str: - rv = os.fsdecode(rv) - elif self.type is bytes: - rv = os.fsencode(rv) - else: - rv = self.type(rv) - - return rv - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - rv = value - - is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") - - if not is_dash: - if self.resolve_path: - # os.path.realpath doesn't resolve symlinks on Windows - # until Python 3.8. Use pathlib for now. - import pathlib - - rv = os.fsdecode(pathlib.Path(rv).resolve()) - - try: - st = os.stat(rv) - except OSError: - if not self.exists: - return self.coerce_path_result(rv) - self.fail( - _("{name} {filename!r} does not exist.").format( - name=self.name.title(), filename=os.fsdecode(value) - ), - param, - ctx, - ) - - if not self.file_okay and stat.S_ISREG(st.st_mode): - self.fail( - _("{name} {filename!r} is a file.").format( - name=self.name.title(), filename=os.fsdecode(value) - ), - param, - ctx, - ) - if not self.dir_okay and stat.S_ISDIR(st.st_mode): - self.fail( - _("{name} {filename!r} is a directory.").format( - name=self.name.title(), filename=os.fsdecode(value) - ), - param, - ctx, - ) - if self.writable and not os.access(rv, os.W_OK): - self.fail( - _("{name} {filename!r} is not writable.").format( - name=self.name.title(), filename=os.fsdecode(value) - ), - param, - ctx, - ) - if self.readable and not os.access(rv, os.R_OK): - self.fail( - _("{name} {filename!r} is not readable.").format( - name=self.name.title(), filename=os.fsdecode(value) - ), - param, - ctx, - ) - - return self.coerce_path_result(rv) - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Return a special completion marker that tells the completion - system to use the shell to provide path completions for only - directories or any paths. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. 
versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - type = "dir" if self.dir_okay and not self.file_okay else "file" - return [CompletionItem(incomplete, type=type)] - - -class Tuple(CompositeParamType): - """The default behavior of Click is to apply a type on a value directly. - This works well in most cases, except for when `nargs` is set to a fixed - count and different types should be used for different items. In this - case the :class:`Tuple` type can be used. This type can only be used - if `nargs` is set to a fixed number. - - For more information see :ref:`tuple-type`. - - This can be selected by using a Python tuple literal as a type. - - :param types: a list of types that should be used for the tuple items. - """ - - def __init__(self, types: t.Sequence[t.Union[t.Type, ParamType]]) -> None: - self.types = [convert_type(ty) for ty in types] - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["types"] = [t.to_info_dict() for t in self.types] - return info_dict - - @property - def name(self) -> str: # type: ignore - return f"<{' '.join(ty.name for ty in self.types)}>" - - @property - def arity(self) -> int: # type: ignore - return len(self.types) - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - len_type = len(self.types) - len_value = len(value) - - if len_value != len_type: - self.fail( - ngettext( - "{len_type} values are required, but {len_value} was given.", - "{len_type} values are required, but {len_value} were given.", - len_value, - ).format(len_type=len_type, len_value=len_value), - param=param, - ctx=ctx, - ) - - return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) - - -def convert_type(ty: t.Optional[t.Any], default: t.Optional[t.Any] = None) -> ParamType: - """Find the most appropriate :class:`ParamType` for the given Python - type. If the type isn't provided, it can be inferred from a default - value. - """ - guessed_type = False - - if ty is None and default is not None: - if isinstance(default, (tuple, list)): - # If the default is empty, ty will remain None and will - # return STRING. - if default: - item = default[0] - - # A tuple of tuples needs to detect the inner types. - # Can't call convert recursively because that would - # incorrectly unwind the tuple to a single type. - if isinstance(item, (tuple, list)): - ty = tuple(map(type, item)) - else: - ty = type(item) - else: - ty = type(default) - - guessed_type = True - - if isinstance(ty, tuple): - return Tuple(ty) - - if isinstance(ty, ParamType): - return ty - - if ty is str or ty is None: - return STRING - - if ty is int: - return INT - - if ty is float: - return FLOAT - - if ty is bool: - return BOOL - - if guessed_type: - return STRING - - if __debug__: - try: - if issubclass(ty, ParamType): - raise AssertionError( - f"Attempted to use an uninstantiated parameter type ({ty})." - ) - except TypeError: - # ty is an instance (correct), so issubclass fails. - pass - - return FuncParamType(ty) - - -#: A dummy parameter type that just does nothing. From a user's -#: perspective this appears to just be the same as `STRING` but -#: internally no string conversion takes place if the input was bytes. -#: This is usually useful when working with file paths as they can -#: appear in bytes and unicode. 
-#: -#: For path related uses the :class:`Path` type is a better choice but -#: there are situations where an unprocessed type is useful which is why -#: it is is provided. -#: -#: .. versionadded:: 4.0 -UNPROCESSED = UnprocessedParamType() - -#: A unicode string parameter type which is the implicit default. This -#: can also be selected by using ``str`` as type. -STRING = StringParamType() - -#: An integer parameter. This can also be selected by using ``int`` as -#: type. -INT = IntParamType() - -#: A floating point value parameter. This can also be selected by using -#: ``float`` as type. -FLOAT = FloatParamType() - -#: A boolean parameter. This is the default for boolean flags. This can -#: also be selected by using ``bool`` as a type. -BOOL = BoolParamType() - -#: A UUID parameter. -UUID = UUIDParameterType() diff --git a/venv/lib/python3.8/site-packages/click/utils.py b/venv/lib/python3.8/site-packages/click/utils.py deleted file mode 100644 index 16033d6..0000000 --- a/venv/lib/python3.8/site-packages/click/utils.py +++ /dev/null @@ -1,579 +0,0 @@ -import os -import sys -import typing as t -from functools import update_wrapper -from types import ModuleType - -from ._compat import _default_text_stderr -from ._compat import _default_text_stdout -from ._compat import _find_binary_writer -from ._compat import auto_wrap_for_ansi -from ._compat import binary_streams -from ._compat import get_filesystem_encoding -from ._compat import open_stream -from ._compat import should_strip_ansi -from ._compat import strip_ansi -from ._compat import text_streams -from ._compat import WIN -from .globals import resolve_color_default - -if t.TYPE_CHECKING: - import typing_extensions as te - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - - -def _posixify(name: str) -> str: - return "-".join(name.split()).lower() - - -def safecall(func: F) -> F: - """Wraps a function so that it swallows exceptions.""" - - def wrapper(*args, **kwargs): # type: ignore - try: - return func(*args, **kwargs) - except Exception: - pass - - return update_wrapper(t.cast(F, wrapper), func) - - -def make_str(value: t.Any) -> str: - """Converts a value into a valid string.""" - if isinstance(value, bytes): - try: - return value.decode(get_filesystem_encoding()) - except UnicodeError: - return value.decode("utf-8", "replace") - return str(value) - - -def make_default_short_help(help: str, max_length: int = 45) -> str: - """Returns a condensed version of help string.""" - # Consider only the first paragraph. - paragraph_end = help.find("\n\n") - - if paragraph_end != -1: - help = help[:paragraph_end] - - # Collapse newlines, tabs, and spaces. - words = help.split() - - if not words: - return "" - - # The first paragraph started with a "no rewrap" marker, ignore it. - if words[0] == "\b": - words = words[1:] - - total_length = 0 - last_index = len(words) - 1 - - for i, word in enumerate(words): - total_length += len(word) + (i > 0) - - if total_length > max_length: # too long, truncate - break - - if word[-1] == ".": # sentence end, truncate without "..." - return " ".join(words[: i + 1]) - - if total_length == max_length and i != last_index: - break # not at sentence end, truncate with "..." - else: - return " ".join(words) # no truncation needed - - # Account for the length of the suffix. - total_length += len("...") - - # remove words until the length is short enough - while i > 0: - total_length -= len(words[i]) + (i > 0) - - if total_length <= max_length: - break - - i -= 1 - - return " ".join(words[:i]) + "..." 
-
-
-class LazyFile:
-    """A lazy file works like a regular file but it does not fully open
-    the file but it does perform some basic checks early to see if the
-    filename parameter does make sense. This is useful for safely opening
-    files for writing.
-    """
-
-    def __init__(
-        self,
-        filename: str,
-        mode: str = "r",
-        encoding: t.Optional[str] = None,
-        errors: t.Optional[str] = "strict",
-        atomic: bool = False,
-    ):
-        self.name = filename
-        self.mode = mode
-        self.encoding = encoding
-        self.errors = errors
-        self.atomic = atomic
-        self._f: t.Optional[t.IO]
-
-        if filename == "-":
-            self._f, self.should_close = open_stream(filename, mode, encoding, errors)
-        else:
-            if "r" in mode:
-                # Open and close the file in case we're opening it for
-                # reading so that we can catch at least some errors in
-                # some cases early.
-                open(filename, mode).close()
-            self._f = None
-            self.should_close = True
-
-    def __getattr__(self, name: str) -> t.Any:
-        return getattr(self.open(), name)
-
-    def __repr__(self) -> str:
-        if self._f is not None:
-            return repr(self._f)
-        return f"<unopened file '{self.name}' {self.mode}>"
-
-    def open(self) -> t.IO:
-        """Opens the file if it's not yet open. This call might fail with
-        a :exc:`FileError`. Not handling this error will produce an error
-        that Click shows.
-        """
-        if self._f is not None:
-            return self._f
-        try:
-            rv, self.should_close = open_stream(
-                self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
-            )
-        except OSError as e:  # noqa: E402
-            from .exceptions import FileError
-
-            raise FileError(self.name, hint=e.strerror) from e
-        self._f = rv
-        return rv
-
-    def close(self) -> None:
-        """Closes the underlying file, no matter what."""
-        if self._f is not None:
-            self._f.close()
-
-    def close_intelligently(self) -> None:
-        """This function only closes the file if it was opened by the lazy
-        file wrapper. For instance this will never close stdin.
-        """
-        if self.should_close:
-            self.close()
-
-    def __enter__(self) -> "LazyFile":
-        return self
-
-    def __exit__(self, exc_type, exc_value, tb):  # type: ignore
-        self.close_intelligently()
-
-    def __iter__(self) -> t.Iterator[t.AnyStr]:
-        self.open()
-        return iter(self._f)  # type: ignore
-
-
-class KeepOpenFile:
-    def __init__(self, file: t.IO) -> None:
-        self._file = file
-
-    def __getattr__(self, name: str) -> t.Any:
-        return getattr(self._file, name)
-
-    def __enter__(self) -> "KeepOpenFile":
-        return self
-
-    def __exit__(self, exc_type, exc_value, tb):  # type: ignore
-        pass
-
-    def __repr__(self) -> str:
-        return repr(self._file)
-
-    def __iter__(self) -> t.Iterator[t.AnyStr]:
-        return iter(self._file)
-
-
-def echo(
-    message: t.Optional[t.Any] = None,
-    file: t.Optional[t.IO] = None,
-    nl: bool = True,
-    err: bool = False,
-    color: t.Optional[bool] = None,
-) -> None:
-    """Print a message and newline to stdout or a file. This should be
-    used instead of :func:`print` because it provides better support
-    for different data, files, and environments.
-
-    Compared to :func:`print`, this does the following:
-
-    -   Ensures that the output encoding is not misconfigured on Linux.
-    -   Supports Unicode in the Windows console.
-    -   Supports writing to binary outputs, and supports writing bytes
-        to text outputs.
-    -   Supports colors and styles on Windows.
-    -   Removes ANSI color and style codes if the output does not look
-        like an interactive terminal.
-    -   Always flushes the output.
-
-    :param message: The string or bytes to output. Other objects are
-        converted to strings.
-    :param file: The file to write to. Defaults to ``stdout``.
- :param err: Write to ``stderr`` instead of ``stdout``. - :param nl: Print a newline after the message. Enabled by default. - :param color: Force showing or hiding colors and other styles. By - default Click will remove color if the output does not look like - an interactive terminal. - - .. versionchanged:: 6.0 - Support Unicode output on the Windows console. Click does not - modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` - will still not support Unicode. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionadded:: 3.0 - Added the ``err`` parameter. - - .. versionchanged:: 2.0 - Support colors on Windows if colorama is installed. - """ - if file is None: - if err: - file = _default_text_stderr() - else: - file = _default_text_stdout() - - # Convert non bytes/text into the native string type. - if message is not None and not isinstance(message, (str, bytes, bytearray)): - out: t.Optional[t.Union[str, bytes]] = str(message) - else: - out = message - - if nl: - out = out or "" - if isinstance(out, str): - out += "\n" - else: - out += b"\n" - - if not out: - file.flush() - return - - # If there is a message and the value looks like bytes, we manually - # need to find the binary stream and write the message in there. - # This is done separately so that most stream types will work as you - # would expect. Eg: you can write to StringIO for other cases. - if isinstance(out, (bytes, bytearray)): - binary_file = _find_binary_writer(file) - - if binary_file is not None: - file.flush() - binary_file.write(out) - binary_file.flush() - return - - # ANSI style code support. For no message or bytes, nothing happens. - # When outputting to a file instead of a terminal, strip codes. - else: - color = resolve_color_default(color) - - if should_strip_ansi(file, color): - out = strip_ansi(out) - elif WIN: - if auto_wrap_for_ansi is not None: - file = auto_wrap_for_ansi(file) # type: ignore - elif not color: - out = strip_ansi(out) - - file.write(out) # type: ignore - file.flush() - - -def get_binary_stream(name: "te.Literal['stdin', 'stdout', 'stderr']") -> t.BinaryIO: - """Returns a system stream for byte processing. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - """ - opener = binary_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener() - - -def get_text_stream( - name: "te.Literal['stdin', 'stdout', 'stderr']", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", -) -> t.TextIO: - """Returns a system stream for text processing. This usually returns - a wrapped stream around a binary stream returned from - :func:`get_binary_stream` but it also can take shortcuts for already - correctly configured streams. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - :param encoding: overrides the detected default encoding. - :param errors: overrides the default error mode. - """ - opener = text_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener(encoding, errors) - - -def open_file( - filename: str, - mode: str = "r", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", - lazy: bool = False, - atomic: bool = False, -) -> t.IO: - """This is similar to how the :class:`File` works but for manual - usage. Files are opened non lazy by default. 
This can open regular
-    files as well as stdin/stdout if ``'-'`` is passed.
-
-    If stdin/stdout is returned the stream is wrapped so that the context
-    manager will not close the stream accidentally. This makes it possible
-    to always use the function like this without having to worry to
-    accidentally close a standard stream::
-
-        with open_file(filename) as f:
-            ...
-
-    .. versionadded:: 3.0
-
-    :param filename: the name of the file to open (or ``'-'`` for stdin/stdout).
-    :param mode: the mode in which to open the file.
-    :param encoding: the encoding to use.
-    :param errors: the error handling for this file.
-    :param lazy: can be flipped to true to open the file lazily.
-    :param atomic: in atomic mode writes go into a temporary file and it's
-        moved on close.
-    """
-    if lazy:
-        return t.cast(t.IO, LazyFile(filename, mode, encoding, errors, atomic=atomic))
-    f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic)
-    if not should_close:
-        f = t.cast(t.IO, KeepOpenFile(f))
-    return f
-
-
-def get_os_args() -> t.Sequence[str]:
-    """Returns the argument part of ``sys.argv``, removing the first
-    value which is the name of the script.
-
-    .. deprecated:: 8.0
-        Will be removed in Click 8.1. Access ``sys.argv[1:]`` directly
-        instead.
-    """
-    import warnings
-
-    warnings.warn(
-        "'get_os_args' is deprecated and will be removed in Click 8.1."
-        " Access 'sys.argv[1:]' directly instead.",
-        DeprecationWarning,
-        stacklevel=2,
-    )
-    return sys.argv[1:]
-
-
-def format_filename(
-    filename: t.Union[str, bytes, os.PathLike], shorten: bool = False
-) -> str:
-    """Formats a filename for user display. The main purpose of this
-    function is to ensure that the filename can be displayed at all. This
-    will decode the filename to unicode if necessary in a way that it will
-    not fail. Optionally, it can shorten the filename to not include the
-    full path to the filename.
-
-    :param filename: formats a filename for UI display. This will also convert
-        the filename into unicode without failing.
-    :param shorten: this optionally shortens the filename to strip of the
-        path that leads up to it.
-    """
-    if shorten:
-        filename = os.path.basename(filename)
-
-    return os.fsdecode(filename)
-
-
-def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str:
-    r"""Returns the config folder for the application. The default behavior
-    is to return whatever is most appropriate for the operating system.
-
-    To give you an idea, for an app called ``"Foo Bar"``, something like
-    the following folders could be returned:
-
-    Mac OS X:
-      ``~/Library/Application Support/Foo Bar``
-    Mac OS X (POSIX):
-      ``~/.foo-bar``
-    Unix:
-      ``~/.config/foo-bar``
-    Unix (POSIX):
-      ``~/.foo-bar``
-    Windows (roaming):
-      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
-    Windows (not roaming):
-      ``C:\Users\<user>\AppData\Local\Foo Bar``
-
-    .. versionadded:: 2.0
-
-    :param app_name: the application name. This should be properly capitalized
-        and can contain whitespace.
-    :param roaming: controls if the folder should be roaming or not on Windows.
-        Has no affect otherwise.
-    :param force_posix: if this is set to `True` then on any POSIX system the
-        folder will be stored in the home folder with a leading
-        dot instead of the XDG config home or darwin's
-        application support folder.
- """ - if WIN: - key = "APPDATA" if roaming else "LOCALAPPDATA" - folder = os.environ.get(key) - if folder is None: - folder = os.path.expanduser("~") - return os.path.join(folder, app_name) - if force_posix: - return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}")) - if sys.platform == "darwin": - return os.path.join( - os.path.expanduser("~/Library/Application Support"), app_name - ) - return os.path.join( - os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), - _posixify(app_name), - ) - - -class PacifyFlushWrapper: - """This wrapper is used to catch and suppress BrokenPipeErrors resulting - from ``.flush()`` being called on broken pipe during the shutdown/final-GC - of the Python interpreter. Notably ``.flush()`` is always called on - ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any - other cleanup code, and the case where the underlying file is not a broken - pipe, all calls and attributes are proxied. - """ - - def __init__(self, wrapped: t.IO) -> None: - self.wrapped = wrapped - - def flush(self) -> None: - try: - self.wrapped.flush() - except OSError as e: - import errno - - if e.errno != errno.EPIPE: - raise - - def __getattr__(self, attr: str) -> t.Any: - return getattr(self.wrapped, attr) - - -def _detect_program_name( - path: t.Optional[str] = None, _main: ModuleType = sys.modules["__main__"] -) -> str: - """Determine the command used to run the program, for use in help - text. If a file or entry point was executed, the file name is - returned. If ``python -m`` was used to execute a module or package, - ``python -m name`` is returned. - - This doesn't try to be too precise, the goal is to give a concise - name for help text. Files are only shown as their name without the - path. ``python`` is only shown for modules, and the full path to - ``sys.executable`` is not shown. - - :param path: The Python file being executed. Python puts this in - ``sys.argv[0]``, which is used by default. - :param _main: The ``__main__`` module. This should only be passed - during internal testing. - - .. versionadded:: 8.0 - Based on command args detection in the Werkzeug reloader. - - :meta private: - """ - if not path: - path = sys.argv[0] - - # The value of __package__ indicates how Python was called. It may - # not exist if a setuptools script is installed as an egg. It may be - # set incorrectly for entry points created with pip on Windows. - if getattr(_main, "__package__", None) is None or ( - os.name == "nt" - and _main.__package__ == "" - and not os.path.exists(path) - and os.path.exists(f"{path}.exe") - ): - # Executed a file, like "python app.py". - return os.path.basename(path) - - # Executed a module, like "python -m example". - # Rewritten by Python from "-m script" to "/path/to/script.py". - # Need to look at main module to determine how it was executed. - py_module = t.cast(str, _main.__package__) - name = os.path.splitext(os.path.basename(path))[0] - - # A submodule like "example.cli". - if name != "__main__": - py_module = f"{py_module}.{name}" - - return f"python -m {py_module.lstrip('.')}" - - -def _expand_args( - args: t.Iterable[str], - *, - user: bool = True, - env: bool = True, - glob_recursive: bool = True, -) -> t.List[str]: - """Simulate Unix shell expansion with Python functions. - - See :func:`glob.glob`, :func:`os.path.expanduser`, and - :func:`os.path.expandvars`. - - This intended for use on Windows, where the shell does not do any - expansion. It may not exactly match what a Unix shell would do. 
- - :param args: List of command line arguments to expand. - :param user: Expand user home directory. - :param env: Expand environment variables. - :param glob_recursive: ``**`` matches directories recursively. - - .. versionadded:: 8.0 - - :meta private: - """ - from glob import glob - - out = [] - - for arg in args: - if user: - arg = os.path.expanduser(arg) - - if env: - arg = os.path.expandvars(arg) - - matches = glob(arg, recursive=glob_recursive) - - if not matches: - out.append(arg) - else: - out.extend(matches) - - return out diff --git a/venv/lib/python3.8/site-packages/easy_install.py b/venv/lib/python3.8/site-packages/easy_install.py deleted file mode 100644 index d87e984..0000000 --- a/venv/lib/python3.8/site-packages/easy_install.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Run the EasyInstall command""" - -if __name__ == '__main__': - from setuptools.command.easy_install import main - main() diff --git a/venv/lib/python3.8/site-packages/flask/__init__.py b/venv/lib/python3.8/site-packages/flask/__init__.py deleted file mode 100644 index 43b5468..0000000 --- a/venv/lib/python3.8/site-packages/flask/__init__.py +++ /dev/null @@ -1,46 +0,0 @@ -from markupsafe import escape -from markupsafe import Markup -from werkzeug.exceptions import abort as abort -from werkzeug.utils import redirect as redirect - -from . import json as json -from .app import Flask as Flask -from .app import Request as Request -from .app import Response as Response -from .blueprints import Blueprint as Blueprint -from .config import Config as Config -from .ctx import after_this_request as after_this_request -from .ctx import copy_current_request_context as copy_current_request_context -from .ctx import has_app_context as has_app_context -from .ctx import has_request_context as has_request_context -from .globals import _app_ctx_stack as _app_ctx_stack -from .globals import _request_ctx_stack as _request_ctx_stack -from .globals import current_app as current_app -from .globals import g as g -from .globals import request as request -from .globals import session as session -from .helpers import flash as flash -from .helpers import get_flashed_messages as get_flashed_messages -from .helpers import get_template_attribute as get_template_attribute -from .helpers import make_response as make_response -from .helpers import safe_join as safe_join -from .helpers import send_file as send_file -from .helpers import send_from_directory as send_from_directory -from .helpers import stream_with_context as stream_with_context -from .helpers import url_for as url_for -from .json import jsonify as jsonify -from .signals import appcontext_popped as appcontext_popped -from .signals import appcontext_pushed as appcontext_pushed -from .signals import appcontext_tearing_down as appcontext_tearing_down -from .signals import before_render_template as before_render_template -from .signals import got_request_exception as got_request_exception -from .signals import message_flashed as message_flashed -from .signals import request_finished as request_finished -from .signals import request_started as request_started -from .signals import request_tearing_down as request_tearing_down -from .signals import signals_available as signals_available -from .signals import template_rendered as template_rendered -from .templating import render_template as render_template -from .templating import render_template_string as render_template_string - -__version__ = "2.0.2" diff --git a/venv/lib/python3.8/site-packages/flask/__main__.py 
b/venv/lib/python3.8/site-packages/flask/__main__.py deleted file mode 100644 index 4e28416..0000000 --- a/venv/lib/python3.8/site-packages/flask/__main__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .cli import main - -main() diff --git a/venv/lib/python3.8/site-packages/flask/app.py b/venv/lib/python3.8/site-packages/flask/app.py deleted file mode 100644 index 23b99e2..0000000 --- a/venv/lib/python3.8/site-packages/flask/app.py +++ /dev/null @@ -1,2091 +0,0 @@ -import functools -import inspect -import logging -import os -import sys -import typing as t -import weakref -from datetime import timedelta -from itertools import chain -from threading import Lock -from types import TracebackType - -from werkzeug.datastructures import Headers -from werkzeug.datastructures import ImmutableDict -from werkzeug.exceptions import BadRequest -from werkzeug.exceptions import BadRequestKeyError -from werkzeug.exceptions import HTTPException -from werkzeug.exceptions import InternalServerError -from werkzeug.local import ContextVar -from werkzeug.routing import BuildError -from werkzeug.routing import Map -from werkzeug.routing import MapAdapter -from werkzeug.routing import RequestRedirect -from werkzeug.routing import RoutingException -from werkzeug.routing import Rule -from werkzeug.wrappers import Response as BaseResponse - -from . import cli -from . import json -from .config import Config -from .config import ConfigAttribute -from .ctx import _AppCtxGlobals -from .ctx import AppContext -from .ctx import RequestContext -from .globals import _request_ctx_stack -from .globals import g -from .globals import request -from .globals import session -from .helpers import _split_blueprint_path -from .helpers import get_debug_flag -from .helpers import get_env -from .helpers import get_flashed_messages -from .helpers import get_load_dotenv -from .helpers import locked_cached_property -from .helpers import url_for -from .json import jsonify -from .logging import create_logger -from .scaffold import _endpoint_from_view_func -from .scaffold import _sentinel -from .scaffold import find_package -from .scaffold import Scaffold -from .scaffold import setupmethod -from .sessions import SecureCookieSessionInterface -from .signals import appcontext_tearing_down -from .signals import got_request_exception -from .signals import request_finished -from .signals import request_started -from .signals import request_tearing_down -from .templating import DispatchingJinjaLoader -from .templating import Environment -from .typing import BeforeFirstRequestCallable -from .typing import ResponseReturnValue -from .typing import TeardownCallable -from .typing import TemplateFilterCallable -from .typing import TemplateGlobalCallable -from .typing import TemplateTestCallable -from .wrappers import Request -from .wrappers import Response - -if t.TYPE_CHECKING: - import typing_extensions as te - from .blueprints import Blueprint - from .testing import FlaskClient - from .testing import FlaskCliRunner - from .typing import ErrorHandlerCallable - -if sys.version_info >= (3, 8): - iscoroutinefunction = inspect.iscoroutinefunction -else: - - def iscoroutinefunction(func: t.Any) -> bool: - while inspect.ismethod(func): - func = func.__func__ - - while isinstance(func, functools.partial): - func = func.func - - return inspect.iscoroutinefunction(func) - - -def _make_timedelta(value: t.Optional[timedelta]) -> t.Optional[timedelta]: - if value is None or isinstance(value, timedelta): - return value - - return timedelta(seconds=value) - - -class 
Flask(Scaffold): - """The flask object implements a WSGI application and acts as the central - object. It is passed the name of the module or package of the - application. Once it is created it will act as a central registry for - the view functions, the URL rules, template configuration and much more. - - The name of the package is used to resolve resources from inside the - package or the folder the module is contained in depending on if the - package parameter resolves to an actual python package (a folder with - an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). - - For more information about resource loading, see :func:`open_resource`. - - Usually you create a :class:`Flask` instance in your main module or - in the :file:`__init__.py` file of your package like this:: - - from flask import Flask - app = Flask(__name__) - - .. admonition:: About the First Parameter - - The idea of the first parameter is to give Flask an idea of what - belongs to your application. This name is used to find resources - on the filesystem, can be used by extensions to improve debugging - information and a lot more. - - So it's important what you provide there. If you are using a single - module, `__name__` is always the correct value. If you however are - using a package, it's usually recommended to hardcode the name of - your package there. - - For example if your application is defined in :file:`yourapplication/app.py` - you should create it with one of the two versions below:: - - app = Flask('yourapplication') - app = Flask(__name__.split('.')[0]) - - Why is that? The application will work even with `__name__`, thanks - to how resources are looked up. However it will make debugging more - painful. Certain extensions can make assumptions based on the - import name of your application. For example the Flask-SQLAlchemy - extension will look for the code in your application that triggered - an SQL query in debug mode. If the import name is not properly set - up, that debugging information is lost. (For example it would only - pick up SQL queries in `yourapplication.app` and not - `yourapplication.views.frontend`) - - .. versionadded:: 0.7 - The `static_url_path`, `static_folder`, and `template_folder` - parameters were added. - - .. versionadded:: 0.8 - The `instance_path` and `instance_relative_config` parameters were - added. - - .. versionadded:: 0.11 - The `root_path` parameter was added. - - .. versionadded:: 1.0 - The ``host_matching`` and ``static_host`` parameters were added. - - .. versionadded:: 1.0 - The ``subdomain_matching`` parameter was added. Subdomain - matching needs to be enabled manually now. Setting - :data:`SERVER_NAME` does not implicitly enable it. - - :param import_name: the name of the application package - :param static_url_path: can be used to specify a different path for the - static files on the web. Defaults to the name - of the `static_folder` folder. - :param static_folder: The folder with static files that is served at - ``static_url_path``. Relative to the application ``root_path`` - or an absolute path. Defaults to ``'static'``. - :param static_host: the host to use when adding the static route. - Defaults to None. Required when using ``host_matching=True`` - with a ``static_folder`` configured. - :param host_matching: set ``url_map.host_matching`` attribute. - Defaults to False. - :param subdomain_matching: consider the subdomain relative to - :data:`SERVER_NAME` when matching routes. Defaults to False. 
- :param template_folder: the folder that contains the templates that should - be used by the application. Defaults to - ``'templates'`` folder in the root path of the - application. - :param instance_path: An alternative instance path for the application. - By default the folder ``'instance'`` next to the - package or module is assumed to be the instance - path. - :param instance_relative_config: if set to ``True`` relative filenames - for loading the config are assumed to - be relative to the instance path instead - of the application root. - :param root_path: The path to the root of the application files. - This should only be set manually when it can't be detected - automatically, such as for namespace packages. - """ - - #: The class that is used for request objects. See :class:`~flask.Request` - #: for more information. - request_class = Request - - #: The class that is used for response objects. See - #: :class:`~flask.Response` for more information. - response_class = Response - - #: The class that is used for the Jinja environment. - #: - #: .. versionadded:: 0.11 - jinja_environment = Environment - - #: The class that is used for the :data:`~flask.g` instance. - #: - #: Example use cases for a custom class: - #: - #: 1. Store arbitrary attributes on flask.g. - #: 2. Add a property for lazy per-request database connectors. - #: 3. Return None instead of AttributeError on unexpected attributes. - #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. - #: - #: In Flask 0.9 this property was called `request_globals_class` but it - #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the - #: flask.g object is now application context scoped. - #: - #: .. versionadded:: 0.10 - app_ctx_globals_class = _AppCtxGlobals - - #: The class that is used for the ``config`` attribute of this app. - #: Defaults to :class:`~flask.Config`. - #: - #: Example use cases for a custom class: - #: - #: 1. Default values for certain config options. - #: 2. Access to config values through attributes in addition to keys. - #: - #: .. versionadded:: 0.11 - config_class = Config - - #: The testing flag. Set this to ``True`` to enable the test mode of - #: Flask extensions (and in the future probably also Flask itself). - #: For example this might activate test helpers that have an - #: additional runtime cost which should not be enabled by default. - #: - #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the - #: default it's implicitly enabled. - #: - #: This attribute can also be configured from the config with the - #: ``TESTING`` configuration key. Defaults to ``False``. - testing = ConfigAttribute("TESTING") - - #: If a secret key is set, cryptographic components can use this to - #: sign cookies and other things. Set this to a complex random value - #: when you want to use the secure cookie for instance. - #: - #: This attribute can also be configured from the config with the - #: :data:`SECRET_KEY` configuration key. Defaults to ``None``. - secret_key = ConfigAttribute("SECRET_KEY") - - #: The secure cookie uses this for the name of the session cookie. - #: - #: This attribute can also be configured from the config with the - #: ``SESSION_COOKIE_NAME`` configuration key. Defaults to ``'session'`` - session_cookie_name = ConfigAttribute("SESSION_COOKIE_NAME") - - #: A :class:`~datetime.timedelta` which is used to set the expiration - #: date of a permanent session. The default is 31 days which makes a - #: permanent session survive for roughly one month. 
- #: - #: This attribute can also be configured from the config with the - #: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to - #: ``timedelta(days=31)`` - permanent_session_lifetime = ConfigAttribute( - "PERMANENT_SESSION_LIFETIME", get_converter=_make_timedelta - ) - - #: A :class:`~datetime.timedelta` or number of seconds which is used - #: as the default ``max_age`` for :func:`send_file`. The default is - #: ``None``, which tells the browser to use conditional requests - #: instead of a timed cache. - #: - #: Configured with the :data:`SEND_FILE_MAX_AGE_DEFAULT` - #: configuration key. - #: - #: .. versionchanged:: 2.0 - #: Defaults to ``None`` instead of 12 hours. - send_file_max_age_default = ConfigAttribute( - "SEND_FILE_MAX_AGE_DEFAULT", get_converter=_make_timedelta - ) - - #: Enable this if you want to use the X-Sendfile feature. Keep in - #: mind that the server has to support this. This only affects files - #: sent with the :func:`send_file` method. - #: - #: .. versionadded:: 0.2 - #: - #: This attribute can also be configured from the config with the - #: ``USE_X_SENDFILE`` configuration key. Defaults to ``False``. - use_x_sendfile = ConfigAttribute("USE_X_SENDFILE") - - #: The JSON encoder class to use. Defaults to :class:`~flask.json.JSONEncoder`. - #: - #: .. versionadded:: 0.10 - json_encoder = json.JSONEncoder - - #: The JSON decoder class to use. Defaults to :class:`~flask.json.JSONDecoder`. - #: - #: .. versionadded:: 0.10 - json_decoder = json.JSONDecoder - - #: Options that are passed to the Jinja environment in - #: :meth:`create_jinja_environment`. Changing these options after - #: the environment is created (accessing :attr:`jinja_env`) will - #: have no effect. - #: - #: .. versionchanged:: 1.1.0 - #: This is a ``dict`` instead of an ``ImmutableDict`` to allow - #: easier configuration. - #: - jinja_options: dict = {} - - #: Default configuration parameters. - default_config = ImmutableDict( - { - "ENV": None, - "DEBUG": None, - "TESTING": False, - "PROPAGATE_EXCEPTIONS": None, - "PRESERVE_CONTEXT_ON_EXCEPTION": None, - "SECRET_KEY": None, - "PERMANENT_SESSION_LIFETIME": timedelta(days=31), - "USE_X_SENDFILE": False, - "SERVER_NAME": None, - "APPLICATION_ROOT": "/", - "SESSION_COOKIE_NAME": "session", - "SESSION_COOKIE_DOMAIN": None, - "SESSION_COOKIE_PATH": None, - "SESSION_COOKIE_HTTPONLY": True, - "SESSION_COOKIE_SECURE": False, - "SESSION_COOKIE_SAMESITE": None, - "SESSION_REFRESH_EACH_REQUEST": True, - "MAX_CONTENT_LENGTH": None, - "SEND_FILE_MAX_AGE_DEFAULT": None, - "TRAP_BAD_REQUEST_ERRORS": None, - "TRAP_HTTP_EXCEPTIONS": False, - "EXPLAIN_TEMPLATE_LOADING": False, - "PREFERRED_URL_SCHEME": "http", - "JSON_AS_ASCII": True, - "JSON_SORT_KEYS": True, - "JSONIFY_PRETTYPRINT_REGULAR": False, - "JSONIFY_MIMETYPE": "application/json", - "TEMPLATES_AUTO_RELOAD": None, - "MAX_COOKIE_SIZE": 4093, - } - ) - - #: The rule object to use for URL rules created. This is used by - #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. - #: - #: .. versionadded:: 0.7 - url_rule_class = Rule - - #: The map object to use for storing the URL rules and routing - #: configuration parameters. Defaults to :class:`werkzeug.routing.Map`. - #: - #: .. versionadded:: 1.1.0 - url_map_class = Map - - #: The :meth:`test_client` method creates an instance of this test - #: client class. Defaults to :class:`~flask.testing.FlaskClient`. - #: - #: .. 
versionadded:: 0.7 - test_client_class: t.Optional[t.Type["FlaskClient"]] = None - - #: The :class:`~click.testing.CliRunner` subclass, by default - #: :class:`~flask.testing.FlaskCliRunner` that is used by - #: :meth:`test_cli_runner`. Its ``__init__`` method should take a - #: Flask app object as the first argument. - #: - #: .. versionadded:: 1.0 - test_cli_runner_class: t.Optional[t.Type["FlaskCliRunner"]] = None - - #: the session interface to use. By default an instance of - #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. - #: - #: .. versionadded:: 0.8 - session_interface = SecureCookieSessionInterface() - - def __init__( - self, - import_name: str, - static_url_path: t.Optional[str] = None, - static_folder: t.Optional[t.Union[str, os.PathLike]] = "static", - static_host: t.Optional[str] = None, - host_matching: bool = False, - subdomain_matching: bool = False, - template_folder: t.Optional[str] = "templates", - instance_path: t.Optional[str] = None, - instance_relative_config: bool = False, - root_path: t.Optional[str] = None, - ): - super().__init__( - import_name=import_name, - static_folder=static_folder, - static_url_path=static_url_path, - template_folder=template_folder, - root_path=root_path, - ) - - if instance_path is None: - instance_path = self.auto_find_instance_path() - elif not os.path.isabs(instance_path): - raise ValueError( - "If an instance path is provided it must be absolute." - " A relative path was given instead." - ) - - #: Holds the path to the instance folder. - #: - #: .. versionadded:: 0.8 - self.instance_path = instance_path - - #: The configuration dictionary as :class:`Config`. This behaves - #: exactly like a regular dictionary but supports additional methods - #: to load a config from files. - self.config = self.make_config(instance_relative_config) - - #: A list of functions that are called when :meth:`url_for` raises a - #: :exc:`~werkzeug.routing.BuildError`. Each function registered here - #: is called with `error`, `endpoint` and `values`. If a function - #: returns ``None`` or raises a :exc:`BuildError` the next function is - #: tried. - #: - #: .. versionadded:: 0.9 - self.url_build_error_handlers: t.List[ - t.Callable[[Exception, str, dict], str] - ] = [] - - #: A list of functions that will be called at the beginning of the - #: first request to this instance. To register a function, use the - #: :meth:`before_first_request` decorator. - #: - #: .. versionadded:: 0.8 - self.before_first_request_funcs: t.List[BeforeFirstRequestCallable] = [] - - #: A list of functions that are called when the application context - #: is destroyed. Since the application context is also torn down - #: if the request ends this is the place to store code that disconnects - #: from databases. - #: - #: .. versionadded:: 0.9 - self.teardown_appcontext_funcs: t.List[TeardownCallable] = [] - - #: A list of shell context processor functions that should be run - #: when a shell context is created. - #: - #: .. versionadded:: 0.11 - self.shell_context_processors: t.List[t.Callable[[], t.Dict[str, t.Any]]] = [] - - #: Maps registered blueprint names to blueprint objects. The - #: dict retains the order the blueprints were registered in. - #: Blueprints can be registered multiple times, this dict does - #: not track how often they were attached. - #: - #: .. versionadded:: 0.7 - self.blueprints: t.Dict[str, "Blueprint"] = {} - - #: a place where extensions can store application specific state. 
For - #: example this is where an extension could store database engines and - #: similar things. - #: - #: The key must match the name of the extension module. For example in - #: case of a "Flask-Foo" extension in `flask_foo`, the key would be - #: ``'foo'``. - #: - #: .. versionadded:: 0.7 - self.extensions: dict = {} - - #: The :class:`~werkzeug.routing.Map` for this instance. You can use - #: this to change the routing converters after the class was created - #: but before any routes are connected. Example:: - #: - #: from werkzeug.routing import BaseConverter - #: - #: class ListConverter(BaseConverter): - #: def to_python(self, value): - #: return value.split(',') - #: def to_url(self, values): - #: return ','.join(super(ListConverter, self).to_url(value) - #: for value in values) - #: - #: app = Flask(__name__) - #: app.url_map.converters['list'] = ListConverter - self.url_map = self.url_map_class() - - self.url_map.host_matching = host_matching - self.subdomain_matching = subdomain_matching - - # tracks internally if the application already handled at least one - # request. - self._got_first_request = False - self._before_request_lock = Lock() - - # Add a static route using the provided static_url_path, static_host, - # and static_folder if there is a configured static_folder. - # Note we do this without checking if static_folder exists. - # For one, it might be created while the server is running (e.g. during - # development). Also, Google App Engine stores static files somewhere - if self.has_static_folder: - assert ( - bool(static_host) == host_matching - ), "Invalid static_host/host_matching combination" - # Use a weakref to avoid creating a reference cycle between the app - # and the view function (see #3761). - self_ref = weakref.ref(self) - self.add_url_rule( - f"{self.static_url_path}/", - endpoint="static", - host=static_host, - view_func=lambda **kw: self_ref().send_static_file(**kw), # type: ignore # noqa: B950 - ) - - # Set the name of the Click group in case someone wants to add - # the app's commands to another CLI tool. - self.cli.name = self.name - - def _is_setup_finished(self) -> bool: - return self.debug and self._got_first_request - - @locked_cached_property - def name(self) -> str: # type: ignore - """The name of the application. This is usually the import name - with the difference that it's guessed from the run file if the - import name is main. This name is used as a display name when - Flask needs the name of the application. It can be set and overridden - to change the value. - - .. versionadded:: 0.8 - """ - if self.import_name == "__main__": - fn = getattr(sys.modules["__main__"], "__file__", None) - if fn is None: - return "__main__" - return os.path.splitext(os.path.basename(fn))[0] - return self.import_name - - @property - def propagate_exceptions(self) -> bool: - """Returns the value of the ``PROPAGATE_EXCEPTIONS`` configuration - value in case it's set, otherwise a sensible default is returned. - - .. versionadded:: 0.7 - """ - rv = self.config["PROPAGATE_EXCEPTIONS"] - if rv is not None: - return rv - return self.testing or self.debug - - @property - def preserve_context_on_exception(self) -> bool: - """Returns the value of the ``PRESERVE_CONTEXT_ON_EXCEPTION`` - configuration value in case it's set, otherwise a sensible default - is returned. - - .. 
versionadded:: 0.7 - """ - rv = self.config["PRESERVE_CONTEXT_ON_EXCEPTION"] - if rv is not None: - return rv - return self.debug - - @locked_cached_property - def logger(self) -> logging.Logger: - """A standard Python :class:`~logging.Logger` for the app, with - the same name as :attr:`name`. - - In debug mode, the logger's :attr:`~logging.Logger.level` will - be set to :data:`~logging.DEBUG`. - - If there are no handlers configured, a default handler will be - added. See :doc:`/logging` for more information. - - .. versionchanged:: 1.1.0 - The logger takes the same name as :attr:`name` rather than - hard-coding ``"flask.app"``. - - .. versionchanged:: 1.0.0 - Behavior was simplified. The logger is always named - ``"flask.app"``. The level is only set during configuration, - it doesn't check ``app.debug`` each time. Only one format is - used, not different ones depending on ``app.debug``. No - handlers are removed, and a handler is only added if no - handlers are already configured. - - .. versionadded:: 0.3 - """ - return create_logger(self) - - @locked_cached_property - def jinja_env(self) -> Environment: - """The Jinja environment used to load templates. - - The environment is created the first time this property is - accessed. Changing :attr:`jinja_options` after that will have no - effect. - """ - return self.create_jinja_environment() - - @property - def got_first_request(self) -> bool: - """This attribute is set to ``True`` if the application started - handling the first request. - - .. versionadded:: 0.8 - """ - return self._got_first_request - - def make_config(self, instance_relative: bool = False) -> Config: - """Used to create the config attribute by the Flask constructor. - The `instance_relative` parameter is passed in from the constructor - of Flask (there named `instance_relative_config`) and indicates if - the config should be relative to the instance path or the root path - of the application. - - .. versionadded:: 0.8 - """ - root_path = self.root_path - if instance_relative: - root_path = self.instance_path - defaults = dict(self.default_config) - defaults["ENV"] = get_env() - defaults["DEBUG"] = get_debug_flag() - return self.config_class(root_path, defaults) - - def auto_find_instance_path(self) -> str: - """Tries to locate the instance path if it was not provided to the - constructor of the application class. It will basically calculate - the path to a folder named ``instance`` next to your main file or - the package. - - .. versionadded:: 0.8 - """ - prefix, package_path = find_package(self.import_name) - if prefix is None: - return os.path.join(package_path, "instance") - return os.path.join(prefix, "var", f"{self.name}-instance") - - def open_instance_resource(self, resource: str, mode: str = "rb") -> t.IO[t.AnyStr]: - """Opens a resource from the application's instance folder - (:attr:`instance_path`). Otherwise works like - :meth:`open_resource`. Instance resources can also be opened for - writing. - - :param resource: the name of the resource. To access resources within - subfolders use forward slashes as separator. - :param mode: resource file opening mode, default is 'rb'. - """ - return open(os.path.join(self.instance_path, resource), mode) - - @property - def templates_auto_reload(self) -> bool: - """Reload templates when they are changed. Used by - :meth:`create_jinja_environment`. - - This attribute can be configured with :data:`TEMPLATES_AUTO_RELOAD`. If - not set, it will be enabled in debug mode. - - .. 
versionadded:: 1.0 - This property was added but the underlying config and behavior - already existed. - """ - rv = self.config["TEMPLATES_AUTO_RELOAD"] - return rv if rv is not None else self.debug - - @templates_auto_reload.setter - def templates_auto_reload(self, value: bool) -> None: - self.config["TEMPLATES_AUTO_RELOAD"] = value - - def create_jinja_environment(self) -> Environment: - """Create the Jinja environment based on :attr:`jinja_options` - and the various Jinja-related methods of the app. Changing - :attr:`jinja_options` after this will have no effect. Also adds - Flask-related globals and filters to the environment. - - .. versionchanged:: 0.11 - ``Environment.auto_reload`` set in accordance with - ``TEMPLATES_AUTO_RELOAD`` configuration option. - - .. versionadded:: 0.5 - """ - options = dict(self.jinja_options) - - if "autoescape" not in options: - options["autoescape"] = self.select_jinja_autoescape - - if "auto_reload" not in options: - options["auto_reload"] = self.templates_auto_reload - - rv = self.jinja_environment(self, **options) - rv.globals.update( - url_for=url_for, - get_flashed_messages=get_flashed_messages, - config=self.config, - # request, session and g are normally added with the - # context processor for efficiency reasons but for imported - # templates we also want the proxies in there. - request=request, - session=session, - g=g, - ) - rv.policies["json.dumps_function"] = json.dumps - return rv - - def create_global_jinja_loader(self) -> DispatchingJinjaLoader: - """Creates the loader for the Jinja2 environment. Can be used to - override just the loader and keeping the rest unchanged. It's - discouraged to override this function. Instead one should override - the :meth:`jinja_loader` function instead. - - The global loader dispatches between the loaders of the application - and the individual blueprints. - - .. versionadded:: 0.7 - """ - return DispatchingJinjaLoader(self) - - def select_jinja_autoescape(self, filename: str) -> bool: - """Returns ``True`` if autoescaping should be active for the given - template name. If no template name is given, returns `True`. - - .. versionadded:: 0.5 - """ - if filename is None: - return True - return filename.endswith((".html", ".htm", ".xml", ".xhtml")) - - def update_template_context(self, context: dict) -> None: - """Update the template context with some commonly used variables. - This injects request, session, config and g into the template - context as well as everything template context processors want - to inject. Note that the as of Flask 0.6, the original values - in the context will not be overridden if a context processor - decides to return a value with the same key. - - :param context: the context as a dictionary that is updated in place - to add extra variables. - """ - names: t.Iterable[t.Optional[str]] = (None,) - - # A template may be rendered outside a request context. - if request: - names = chain(names, reversed(request.blueprints)) - - # The values passed to render_template take precedence. Keep a - # copy to re-apply after all context functions. - orig_ctx = context.copy() - - for name in names: - if name in self.template_context_processors: - for func in self.template_context_processors[name]: - context.update(func()) - - context.update(orig_ctx) - - def make_shell_context(self) -> dict: - """Returns the shell context for an interactive shell for this - application. This runs all the registered shell context - processors. - - .. 
versionadded:: 0.11 - """ - rv = {"app": self, "g": g} - for processor in self.shell_context_processors: - rv.update(processor()) - return rv - - #: What environment the app is running in. Flask and extensions may - #: enable behaviors based on the environment, such as enabling debug - #: mode. This maps to the :data:`ENV` config key. This is set by the - #: :envvar:`FLASK_ENV` environment variable and may not behave as - #: expected if set in code. - #: - #: **Do not enable development when deploying in production.** - #: - #: Default: ``'production'`` - env = ConfigAttribute("ENV") - - @property - def debug(self) -> bool: - """Whether debug mode is enabled. When using ``flask run`` to start - the development server, an interactive debugger will be shown for - unhandled exceptions, and the server will be reloaded when code - changes. This maps to the :data:`DEBUG` config key. This is - enabled when :attr:`env` is ``'development'`` and is overridden - by the ``FLASK_DEBUG`` environment variable. It may not behave as - expected if set in code. - - **Do not enable debug mode when deploying in production.** - - Default: ``True`` if :attr:`env` is ``'development'``, or - ``False`` otherwise. - """ - return self.config["DEBUG"] - - @debug.setter - def debug(self, value: bool) -> None: - self.config["DEBUG"] = value - self.jinja_env.auto_reload = self.templates_auto_reload - - def run( - self, - host: t.Optional[str] = None, - port: t.Optional[int] = None, - debug: t.Optional[bool] = None, - load_dotenv: bool = True, - **options: t.Any, - ) -> None: - """Runs the application on a local development server. - - Do not use ``run()`` in a production setting. It is not intended to - meet security and performance requirements for a production server. - Instead, see :doc:`/deploying/index` for WSGI server recommendations. - - If the :attr:`debug` flag is set the server will automatically reload - for code changes and show a debugger in case an exception happened. - - If you want to run the application in debug mode, but disable the - code execution on the interactive debugger, you can pass - ``use_evalex=False`` as parameter. This will keep the debugger's - traceback screen active, but disable code execution. - - It is not recommended to use this function for development with - automatic reloading as this is badly supported. Instead you should - be using the :command:`flask` command line script's ``run`` support. - - .. admonition:: Keep in Mind - - Flask will suppress any server error with a generic error page - unless it is in debug mode. As such to enable just the - interactive debugger without the code reloading, you have to - invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. - Setting ``use_debugger`` to ``True`` without being in debug mode - won't catch any exceptions because there won't be any to - catch. - - :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to - have the server available externally as well. Defaults to - ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable - if present. - :param port: the port of the webserver. Defaults to ``5000`` or the - port defined in the ``SERVER_NAME`` config variable if present. - :param debug: if given, enable or disable debug mode. See - :attr:`debug`. - :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` - files to set environment variables. Will also change the working - directory to the directory containing the first file found. 
- :param options: the options to be forwarded to the underlying Werkzeug - server. See :func:`werkzeug.serving.run_simple` for more - information. - - .. versionchanged:: 1.0 - If installed, python-dotenv will be used to load environment - variables from :file:`.env` and :file:`.flaskenv` files. - - If set, the :envvar:`FLASK_ENV` and :envvar:`FLASK_DEBUG` - environment variables will override :attr:`env` and - :attr:`debug`. - - Threaded mode is enabled by default. - - .. versionchanged:: 0.10 - The default port is now picked from the ``SERVER_NAME`` - variable. - """ - # Change this into a no-op if the server is invoked from the - # command line. Have a look at cli.py for more information. - if os.environ.get("FLASK_RUN_FROM_CLI") == "true": - from .debughelpers import explain_ignored_app_run - - explain_ignored_app_run() - return - - if get_load_dotenv(load_dotenv): - cli.load_dotenv() - - # if set, let env vars override previous values - if "FLASK_ENV" in os.environ: - self.env = get_env() - self.debug = get_debug_flag() - elif "FLASK_DEBUG" in os.environ: - self.debug = get_debug_flag() - - # debug passed to method overrides all other sources - if debug is not None: - self.debug = bool(debug) - - server_name = self.config.get("SERVER_NAME") - sn_host = sn_port = None - - if server_name: - sn_host, _, sn_port = server_name.partition(":") - - if not host: - if sn_host: - host = sn_host - else: - host = "127.0.0.1" - - if port or port == 0: - port = int(port) - elif sn_port: - port = int(sn_port) - else: - port = 5000 - - options.setdefault("use_reloader", self.debug) - options.setdefault("use_debugger", self.debug) - options.setdefault("threaded", True) - - cli.show_server_banner(self.env, self.debug, self.name, False) - - from werkzeug.serving import run_simple - - try: - run_simple(t.cast(str, host), port, self, **options) - finally: - # reset the first request information if the development server - # reset normally. This makes it possible to restart the server - # without reloader and that stuff from an interactive shell. - self._got_first_request = False - - def test_client(self, use_cookies: bool = True, **kwargs: t.Any) -> "FlaskClient": - """Creates a test client for this application. For information - about unit testing head over to :doc:`/testing`. - - Note that if you are testing for assertions or exceptions in your - application code, you must set ``app.testing = True`` in order for the - exceptions to propagate to the test client. Otherwise, the exception - will be handled by the application (not visible to the test client) and - the only indication of an AssertionError or other exception will be a - 500 status code response to the test client. See the :attr:`testing` - attribute. For example:: - - app.testing = True - client = app.test_client() - - The test client can be used in a ``with`` block to defer the closing down - of the context until the end of the ``with`` block. This is useful if - you want to access the context locals for testing:: - - with app.test_client() as c: - rv = c.get('/?vodka=42') - assert request.args['vodka'] == '42' - - Additionally, you may pass optional keyword arguments that will then - be passed to the application's :attr:`test_client_class` constructor. 
- For example:: - - from flask.testing import FlaskClient - - class CustomClient(FlaskClient): - def __init__(self, *args, **kwargs): - self._authentication = kwargs.pop("authentication") - super(CustomClient,self).__init__( *args, **kwargs) - - app.test_client_class = CustomClient - client = app.test_client(authentication='Basic ....') - - See :class:`~flask.testing.FlaskClient` for more information. - - .. versionchanged:: 0.4 - added support for ``with`` block usage for the client. - - .. versionadded:: 0.7 - The `use_cookies` parameter was added as well as the ability - to override the client to be used by setting the - :attr:`test_client_class` attribute. - - .. versionchanged:: 0.11 - Added `**kwargs` to support passing additional keyword arguments to - the constructor of :attr:`test_client_class`. - """ - cls = self.test_client_class - if cls is None: - from .testing import FlaskClient as cls # type: ignore - return cls( # type: ignore - self, self.response_class, use_cookies=use_cookies, **kwargs - ) - - def test_cli_runner(self, **kwargs: t.Any) -> "FlaskCliRunner": - """Create a CLI runner for testing CLI commands. - See :ref:`testing-cli`. - - Returns an instance of :attr:`test_cli_runner_class`, by default - :class:`~flask.testing.FlaskCliRunner`. The Flask app object is - passed as the first argument. - - .. versionadded:: 1.0 - """ - cls = self.test_cli_runner_class - - if cls is None: - from .testing import FlaskCliRunner as cls # type: ignore - - return cls(self, **kwargs) # type: ignore - - @setupmethod - def register_blueprint(self, blueprint: "Blueprint", **options: t.Any) -> None: - """Register a :class:`~flask.Blueprint` on the application. Keyword - arguments passed to this method will override the defaults set on the - blueprint. - - Calls the blueprint's :meth:`~flask.Blueprint.register` method after - recording the blueprint in the application's :attr:`blueprints`. - - :param blueprint: The blueprint to register. - :param url_prefix: Blueprint routes will be prefixed with this. - :param subdomain: Blueprint routes will match on this subdomain. - :param url_defaults: Blueprint routes will use these default values for - view arguments. - :param options: Additional keyword arguments are passed to - :class:`~flask.blueprints.BlueprintSetupState`. They can be - accessed in :meth:`~flask.Blueprint.record` callbacks. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - - .. versionadded:: 0.7 - """ - blueprint.register(self, options) - - def iter_blueprints(self) -> t.ValuesView["Blueprint"]: - """Iterates over all blueprints by the order they were registered. - - .. versionadded:: 0.11 - """ - return self.blueprints.values() - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: t.Optional[str] = None, - view_func: t.Optional[t.Callable] = None, - provide_automatic_options: t.Optional[bool] = None, - **options: t.Any, - ) -> None: - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) # type: ignore - options["endpoint"] = endpoint - methods = options.pop("methods", None) - - # if the methods are not given and the view_func object knows its - # methods we can use that instead. If neither exists, we go with - # a tuple of only ``GET`` as default. 
- if methods is None: - methods = getattr(view_func, "methods", None) or ("GET",) - if isinstance(methods, str): - raise TypeError( - "Allowed methods must be a list of strings, for" - ' example: @app.route(..., methods=["POST"])' - ) - methods = {item.upper() for item in methods} - - # Methods that should always be added - required_methods = set(getattr(view_func, "required_methods", ())) - - # starting with Flask 0.8 the view_func object can disable and - # force-enable the automatic options handling. - if provide_automatic_options is None: - provide_automatic_options = getattr( - view_func, "provide_automatic_options", None - ) - - if provide_automatic_options is None: - if "OPTIONS" not in methods: - provide_automatic_options = True - required_methods.add("OPTIONS") - else: - provide_automatic_options = False - - # Add the required methods now. - methods |= required_methods - - rule = self.url_rule_class(rule, methods=methods, **options) - rule.provide_automatic_options = provide_automatic_options # type: ignore - - self.url_map.add(rule) - if view_func is not None: - old_func = self.view_functions.get(endpoint) - if old_func is not None and old_func != view_func: - raise AssertionError( - "View function mapping is overwriting an existing" - f" endpoint function: {endpoint}" - ) - self.view_functions[endpoint] = view_func - - @setupmethod - def template_filter( - self, name: t.Optional[str] = None - ) -> t.Callable[[TemplateFilterCallable], TemplateFilterCallable]: - """A decorator that is used to register custom template filter. - You can specify a name for the filter, otherwise the function - name will be used. Example:: - - @app.template_filter() - def reverse(s): - return s[::-1] - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def decorator(f: TemplateFilterCallable) -> TemplateFilterCallable: - self.add_template_filter(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_filter( - self, f: TemplateFilterCallable, name: t.Optional[str] = None - ) -> None: - """Register a custom template filter. Works exactly like the - :meth:`template_filter` decorator. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - self.jinja_env.filters[name or f.__name__] = f - - @setupmethod - def template_test( - self, name: t.Optional[str] = None - ) -> t.Callable[[TemplateTestCallable], TemplateTestCallable]: - """A decorator that is used to register custom template test. - You can specify a name for the test, otherwise the function - name will be used. Example:: - - @app.template_test() - def is_prime(n): - if n == 2: - return True - for i in range(2, int(math.ceil(math.sqrt(n))) + 1): - if n % i == 0: - return False - return True - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def decorator(f: TemplateTestCallable) -> TemplateTestCallable: - self.add_template_test(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_test( - self, f: TemplateTestCallable, name: t.Optional[str] = None - ) -> None: - """Register a custom template test. Works exactly like the - :meth:`template_test` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. 
- """ - self.jinja_env.tests[name or f.__name__] = f - - @setupmethod - def template_global( - self, name: t.Optional[str] = None - ) -> t.Callable[[TemplateGlobalCallable], TemplateGlobalCallable]: - """A decorator that is used to register a custom template global function. - You can specify a name for the global function, otherwise the function - name will be used. Example:: - - @app.template_global() - def double(n): - return 2 * n - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - - def decorator(f: TemplateGlobalCallable) -> TemplateGlobalCallable: - self.add_template_global(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_global( - self, f: TemplateGlobalCallable, name: t.Optional[str] = None - ) -> None: - """Register a custom template global function. Works exactly like the - :meth:`template_global` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - self.jinja_env.globals[name or f.__name__] = f - - @setupmethod - def before_first_request( - self, f: BeforeFirstRequestCallable - ) -> BeforeFirstRequestCallable: - """Registers a function to be run before the first request to this - instance of the application. - - The function will be called without any arguments and its return - value is ignored. - - .. versionadded:: 0.8 - """ - self.before_first_request_funcs.append(f) - return f - - @setupmethod - def teardown_appcontext(self, f: TeardownCallable) -> TeardownCallable: - """Registers a function to be called when the application context - ends. These functions are typically also called when the request - context is popped. - - Example:: - - ctx = app.app_context() - ctx.push() - ... - ctx.pop() - - When ``ctx.pop()`` is executed in the above example, the teardown - functions are called just before the app context moves from the - stack of active contexts. This becomes relevant if you are using - such constructs in tests. - - Since a request context typically also manages an application - context it would also be called when you pop a request context. - - When a teardown function was called because of an unhandled exception - it will be passed an error object. If an :meth:`errorhandler` is - registered, it will handle the exception and the teardown will not - receive it. - - The return values of teardown functions are ignored. - - .. versionadded:: 0.9 - """ - self.teardown_appcontext_funcs.append(f) - return f - - @setupmethod - def shell_context_processor(self, f: t.Callable) -> t.Callable: - """Registers a shell context processor function. - - .. versionadded:: 0.11 - """ - self.shell_context_processors.append(f) - return f - - def _find_error_handler( - self, e: Exception - ) -> t.Optional["ErrorHandlerCallable[Exception]"]: - """Return a registered error handler for an exception in this order: - blueprint handler for a specific code, app handler for a specific code, - blueprint handler for an exception class, app handler for an exception - class, or ``None`` if a suitable handler is not found. 
- """ - exc_class, code = self._get_exc_class_and_code(type(e)) - names = (*request.blueprints, None) - - for c in (code, None) if code is not None else (None,): - for name in names: - handler_map = self.error_handler_spec[name][c] - - if not handler_map: - continue - - for cls in exc_class.__mro__: - handler = handler_map.get(cls) - - if handler is not None: - return handler - return None - - def handle_http_exception( - self, e: HTTPException - ) -> t.Union[HTTPException, ResponseReturnValue]: - """Handles an HTTP exception. By default this will invoke the - registered error handlers and fall back to returning the - exception as response. - - .. versionchanged:: 1.0.3 - ``RoutingException``, used internally for actions such as - slash redirects during routing, is not passed to error - handlers. - - .. versionchanged:: 1.0 - Exceptions are looked up by code *and* by MRO, so - ``HTTPException`` subclasses can be handled with a catch-all - handler for the base ``HTTPException``. - - .. versionadded:: 0.3 - """ - # Proxy exceptions don't have error codes. We want to always return - # those unchanged as errors - if e.code is None: - return e - - # RoutingExceptions are used internally to trigger routing - # actions, such as slash redirects raising RequestRedirect. They - # are not raised or handled in user code. - if isinstance(e, RoutingException): - return e - - handler = self._find_error_handler(e) - if handler is None: - return e - return self.ensure_sync(handler)(e) - - def trap_http_exception(self, e: Exception) -> bool: - """Checks if an HTTP exception should be trapped or not. By default - this will return ``False`` for all exceptions except for a bad request - key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``. It - also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``. - - This is called for all HTTP exceptions raised by a view function. - If it returns ``True`` for any exception the error handler for this - exception is not called and it shows up as regular exception in the - traceback. This is helpful for debugging implicitly raised HTTP - exceptions. - - .. versionchanged:: 1.0 - Bad request errors are not trapped by default in debug mode. - - .. versionadded:: 0.8 - """ - if self.config["TRAP_HTTP_EXCEPTIONS"]: - return True - - trap_bad_request = self.config["TRAP_BAD_REQUEST_ERRORS"] - - # if unset, trap key errors in debug mode - if ( - trap_bad_request is None - and self.debug - and isinstance(e, BadRequestKeyError) - ): - return True - - if trap_bad_request: - return isinstance(e, BadRequest) - - return False - - def handle_user_exception( - self, e: Exception - ) -> t.Union[HTTPException, ResponseReturnValue]: - """This method is called whenever an exception occurs that - should be handled. A special case is :class:`~werkzeug - .exceptions.HTTPException` which is forwarded to the - :meth:`handle_http_exception` method. This function will either - return a response value or reraise the exception with the same - traceback. - - .. versionchanged:: 1.0 - Key errors raised from request data like ``form`` show the - bad key in debug mode rather than a generic bad request - message. - - .. 
versionadded:: 0.7 - """ - if isinstance(e, BadRequestKeyError) and ( - self.debug or self.config["TRAP_BAD_REQUEST_ERRORS"] - ): - e.show_exception = True - - if isinstance(e, HTTPException) and not self.trap_http_exception(e): - return self.handle_http_exception(e) - - handler = self._find_error_handler(e) - - if handler is None: - raise - - return self.ensure_sync(handler)(e) - - def handle_exception(self, e: Exception) -> Response: - """Handle an exception that did not have an error handler - associated with it, or that was raised from an error handler. - This always causes a 500 ``InternalServerError``. - - Always sends the :data:`got_request_exception` signal. - - If :attr:`propagate_exceptions` is ``True``, such as in debug - mode, the error will be re-raised so that the debugger can - display it. Otherwise, the original exception is logged, and - an :exc:`~werkzeug.exceptions.InternalServerError` is returned. - - If an error handler is registered for ``InternalServerError`` or - ``500``, it will be used. For consistency, the handler will - always receive the ``InternalServerError``. The original - unhandled exception is available as ``e.original_exception``. - - .. versionchanged:: 1.1.0 - Always passes the ``InternalServerError`` instance to the - handler, setting ``original_exception`` to the unhandled - error. - - .. versionchanged:: 1.1.0 - ``after_request`` functions and other finalization is done - even for the default 500 response when there is no handler. - - .. versionadded:: 0.3 - """ - exc_info = sys.exc_info() - got_request_exception.send(self, exception=e) - - if self.propagate_exceptions: - # Re-raise if called with an active exception, otherwise - # raise the passed in exception. - if exc_info[1] is e: - raise - - raise e - - self.log_exception(exc_info) - server_error: t.Union[InternalServerError, ResponseReturnValue] - server_error = InternalServerError(original_exception=e) - handler = self._find_error_handler(server_error) - - if handler is not None: - server_error = self.ensure_sync(handler)(server_error) - - return self.finalize_request(server_error, from_error_handler=True) - - def log_exception( - self, - exc_info: t.Union[ - t.Tuple[type, BaseException, TracebackType], t.Tuple[None, None, None] - ], - ) -> None: - """Logs an exception. This is called by :meth:`handle_exception` - if debugging is disabled and right before the handler is called. - The default implementation logs the exception as error on the - :attr:`logger`. - - .. versionadded:: 0.8 - """ - self.logger.error( - f"Exception on {request.path} [{request.method}]", exc_info=exc_info - ) - - def raise_routing_exception(self, request: Request) -> "te.NoReturn": - """Exceptions that are recording during routing are reraised with - this method. During debug we are not reraising redirect requests - for non ``GET``, ``HEAD``, or ``OPTIONS`` requests and we're raising - a different error instead to help debug situations. - - :internal: - """ - if ( - not self.debug - or not isinstance(request.routing_exception, RequestRedirect) - or request.method in ("GET", "HEAD", "OPTIONS") - ): - raise request.routing_exception # type: ignore - - from .debughelpers import FormDataRoutingRedirect - - raise FormDataRoutingRedirect(request) - - def dispatch_request(self) -> ResponseReturnValue: - """Does the request dispatching. Matches the URL and returns the - return value of the view or error handler. This does not have to - be a response object. 
In order to convert the return value to a - proper response object, call :func:`make_response`. - - .. versionchanged:: 0.7 - This no longer does the exception handling, this code was - moved to the new :meth:`full_dispatch_request`. - """ - req = _request_ctx_stack.top.request - if req.routing_exception is not None: - self.raise_routing_exception(req) - rule = req.url_rule - # if we provide automatic options for this URL and the - # request came with the OPTIONS method, reply automatically - if ( - getattr(rule, "provide_automatic_options", False) - and req.method == "OPTIONS" - ): - return self.make_default_options_response() - # otherwise dispatch to the handler for that endpoint - return self.ensure_sync(self.view_functions[rule.endpoint])(**req.view_args) - - def full_dispatch_request(self) -> Response: - """Dispatches the request and on top of that performs request - pre and postprocessing as well as HTTP exception catching and - error handling. - - .. versionadded:: 0.7 - """ - self.try_trigger_before_first_request_functions() - try: - request_started.send(self) - rv = self.preprocess_request() - if rv is None: - rv = self.dispatch_request() - except Exception as e: - rv = self.handle_user_exception(e) - return self.finalize_request(rv) - - def finalize_request( - self, - rv: t.Union[ResponseReturnValue, HTTPException], - from_error_handler: bool = False, - ) -> Response: - """Given the return value from a view function this finalizes - the request by converting it into a response and invoking the - postprocessing functions. This is invoked for both normal - request dispatching as well as error handlers. - - Because this means that it might be called as a result of a - failure a special safe mode is available which can be enabled - with the `from_error_handler` flag. If enabled, failures in - response processing will be logged and otherwise ignored. - - :internal: - """ - response = self.make_response(rv) - try: - response = self.process_response(response) - request_finished.send(self, response=response) - except Exception: - if not from_error_handler: - raise - self.logger.exception( - "Request finalizing failed with an error while handling an error" - ) - return response - - def try_trigger_before_first_request_functions(self) -> None: - """Called before each request and will ensure that it triggers - the :attr:`before_first_request_funcs` and only exactly once per - application instance (which means process usually). - - :internal: - """ - if self._got_first_request: - return - with self._before_request_lock: - if self._got_first_request: - return - for func in self.before_first_request_funcs: - self.ensure_sync(func)() - self._got_first_request = True - - def make_default_options_response(self) -> Response: - """This method is called to create the default ``OPTIONS`` response. - This can be changed through subclassing to change the default - behavior of ``OPTIONS`` responses. - - .. versionadded:: 0.7 - """ - adapter = _request_ctx_stack.top.url_adapter - methods = adapter.allowed_methods() - rv = self.response_class() - rv.allow.update(methods) - return rv - - def should_ignore_error(self, error: t.Optional[BaseException]) -> bool: - """This is called to figure out if an error should be ignored - or not as far as the teardown system is concerned. If this - function returns ``True`` then the teardown handlers will not be - passed the error. - - .. 
versionadded:: 0.10 - """ - return False - - def ensure_sync(self, func: t.Callable) -> t.Callable: - """Ensure that the function is synchronous for WSGI workers. - Plain ``def`` functions are returned as-is. ``async def`` - functions are wrapped to run and wait for the response. - - Override this method to change how the app runs async views. - - .. versionadded:: 2.0 - """ - if iscoroutinefunction(func): - return self.async_to_sync(func) - - return func - - def async_to_sync( - self, func: t.Callable[..., t.Coroutine] - ) -> t.Callable[..., t.Any]: - """Return a sync function that will run the coroutine function. - - .. code-block:: python - - result = app.async_to_sync(func)(*args, **kwargs) - - Override this method to change how the app converts async code - to be synchronously callable. - - .. versionadded:: 2.0 - """ - try: - from asgiref.sync import async_to_sync as asgiref_async_to_sync - except ImportError: - raise RuntimeError( - "Install Flask with the 'async' extra in order to use async views." - ) from None - - # Check that Werkzeug isn't using its fallback ContextVar class. - if ContextVar.__module__ == "werkzeug.local": - raise RuntimeError( - "Async cannot be used with this combination of Python " - "and Greenlet versions." - ) - - return asgiref_async_to_sync(func) - - def make_response(self, rv: ResponseReturnValue) -> Response: - """Convert the return value from a view function to an instance of - :attr:`response_class`. - - :param rv: the return value from the view function. The view function - must return a response. Returning ``None``, or the view ending - without returning, is not allowed. The following types are allowed - for ``view_rv``: - - ``str`` - A response object is created with the string encoded to UTF-8 - as the body. - - ``bytes`` - A response object is created with the bytes as the body. - - ``dict`` - A dictionary that will be jsonify'd before being returned. - - ``tuple`` - Either ``(body, status, headers)``, ``(body, status)``, or - ``(body, headers)``, where ``body`` is any of the other types - allowed here, ``status`` is a string or an integer, and - ``headers`` is a dictionary or a list of ``(key, value)`` - tuples. If ``body`` is a :attr:`response_class` instance, - ``status`` overwrites the exiting value and ``headers`` are - extended. - - :attr:`response_class` - The object is returned unchanged. - - other :class:`~werkzeug.wrappers.Response` class - The object is coerced to :attr:`response_class`. - - :func:`callable` - The function is called as a WSGI application. The result is - used to create a response object. - - .. versionchanged:: 0.9 - Previously a tuple was interpreted as the arguments for the - response object. - """ - - status = headers = None - - # unpack tuple returns - if isinstance(rv, tuple): - len_rv = len(rv) - - # a 3-tuple is unpacked directly - if len_rv == 3: - rv, status, headers = rv - # decide if a 2-tuple has status or headers - elif len_rv == 2: - if isinstance(rv[1], (Headers, dict, tuple, list)): - rv, headers = rv - else: - rv, status = rv - # other sized tuples are not allowed - else: - raise TypeError( - "The view function did not return a valid response tuple." - " The tuple must have the form (body, status, headers)," - " (body, status), or (body, headers)." - ) - - # the body must not be None - if rv is None: - raise TypeError( - f"The view function for {request.endpoint!r} did not" - " return a valid response. The function either returned" - " None or ended without a return statement." 
- ) - - # make sure the body is an instance of the response class - if not isinstance(rv, self.response_class): - if isinstance(rv, (str, bytes, bytearray)): - # let the response class set the status and headers instead of - # waiting to do it manually, so that the class can handle any - # special logic - rv = self.response_class(rv, status=status, headers=headers) - status = headers = None - elif isinstance(rv, dict): - rv = jsonify(rv) - elif isinstance(rv, BaseResponse) or callable(rv): - # evaluate a WSGI callable, or coerce a different response - # class to the correct type - try: - rv = self.response_class.force_type(rv, request.environ) # type: ignore # noqa: B950 - except TypeError as e: - raise TypeError( - f"{e}\nThe view function did not return a valid" - " response. The return type must be a string," - " dict, tuple, Response instance, or WSGI" - f" callable, but it was a {type(rv).__name__}." - ).with_traceback(sys.exc_info()[2]) from None - else: - raise TypeError( - "The view function did not return a valid" - " response. The return type must be a string," - " dict, tuple, Response instance, or WSGI" - f" callable, but it was a {type(rv).__name__}." - ) - - rv = t.cast(Response, rv) - # prefer the status if it was provided - if status is not None: - if isinstance(status, (str, bytes, bytearray)): - rv.status = status # type: ignore - else: - rv.status_code = status - - # extend existing headers with provided headers - if headers: - rv.headers.update(headers) - - return rv - - def create_url_adapter( - self, request: t.Optional[Request] - ) -> t.Optional[MapAdapter]: - """Creates a URL adapter for the given request. The URL adapter - is created at a point where the request context is not yet set - up so the request is passed explicitly. - - .. versionadded:: 0.6 - - .. versionchanged:: 0.9 - This can now also be called without a request object when the - URL adapter is created for the application context. - - .. versionchanged:: 1.0 - :data:`SERVER_NAME` no longer implicitly enables subdomain - matching. Use :attr:`subdomain_matching` instead. - """ - if request is not None: - # If subdomain matching is disabled (the default), use the - # default subdomain in all cases. This should be the default - # in Werkzeug but it currently does not have that feature. - if not self.subdomain_matching: - subdomain = self.url_map.default_subdomain or None - else: - subdomain = None - - return self.url_map.bind_to_environ( - request.environ, - server_name=self.config["SERVER_NAME"], - subdomain=subdomain, - ) - # We need at the very least the server name to be set for this - # to work. - if self.config["SERVER_NAME"] is not None: - return self.url_map.bind( - self.config["SERVER_NAME"], - script_name=self.config["APPLICATION_ROOT"], - url_scheme=self.config["PREFERRED_URL_SCHEME"], - ) - - return None - - def inject_url_defaults(self, endpoint: str, values: dict) -> None: - """Injects the URL defaults for the given endpoint directly into - the values dictionary passed. This is used internally and - automatically called on URL building. - - .. versionadded:: 0.7 - """ - names: t.Iterable[t.Optional[str]] = (None,) - - # url_for may be called outside a request context, parse the - # passed endpoint instead of using request.blueprints. - if "." 
in endpoint: - names = chain( - names, reversed(_split_blueprint_path(endpoint.rpartition(".")[0])) - ) - - for name in names: - if name in self.url_default_functions: - for func in self.url_default_functions[name]: - func(endpoint, values) - - def handle_url_build_error( - self, error: Exception, endpoint: str, values: dict - ) -> str: - """Handle :class:`~werkzeug.routing.BuildError` on - :meth:`url_for`. - """ - for handler in self.url_build_error_handlers: - try: - rv = handler(error, endpoint, values) - except BuildError as e: - # make error available outside except block - error = e - else: - if rv is not None: - return rv - - # Re-raise if called with an active exception, otherwise raise - # the passed in exception. - if error is sys.exc_info()[1]: - raise - - raise error - - def preprocess_request(self) -> t.Optional[ResponseReturnValue]: - """Called before the request is dispatched. Calls - :attr:`url_value_preprocessors` registered with the app and the - current blueprint (if any). Then calls :attr:`before_request_funcs` - registered with the app and the blueprint. - - If any :meth:`before_request` handler returns a non-None value, the - value is handled as if it was the return value from the view, and - further request handling is stopped. - """ - names = (None, *reversed(request.blueprints)) - - for name in names: - if name in self.url_value_preprocessors: - for url_func in self.url_value_preprocessors[name]: - url_func(request.endpoint, request.view_args) - - for name in names: - if name in self.before_request_funcs: - for before_func in self.before_request_funcs[name]: - rv = self.ensure_sync(before_func)() - - if rv is not None: - return rv - - return None - - def process_response(self, response: Response) -> Response: - """Can be overridden in order to modify the response object - before it's sent to the WSGI server. By default this will - call all the :meth:`after_request` decorated functions. - - .. versionchanged:: 0.5 - As of Flask 0.5 the functions registered for after request - execution are called in reverse order of registration. - - :param response: a :attr:`response_class` object. - :return: a new response object or the same, has to be an - instance of :attr:`response_class`. - """ - ctx = _request_ctx_stack.top - - for func in ctx._after_request_functions: - response = self.ensure_sync(func)(response) - - for name in chain(request.blueprints, (None,)): - if name in self.after_request_funcs: - for func in reversed(self.after_request_funcs[name]): - response = self.ensure_sync(func)(response) - - if not self.session_interface.is_null_session(ctx.session): - self.session_interface.save_session(self, ctx.session, response) - - return response - - def do_teardown_request( - self, exc: t.Optional[BaseException] = _sentinel # type: ignore - ) -> None: - """Called after the request is dispatched and the response is - returned, right before the request context is popped. - - This calls all functions decorated with - :meth:`teardown_request`, and :meth:`Blueprint.teardown_request` - if a blueprint handled the request. Finally, the - :data:`request_tearing_down` signal is sent. - - This is called by - :meth:`RequestContext.pop() `, - which may be delayed during testing to maintain access to - resources. - - :param exc: An unhandled exception raised while dispatching the - request. Detected from the current exception information if - not passed. Passed to each teardown function. - - .. versionchanged:: 0.9 - Added the ``exc`` argument. 
- """ - if exc is _sentinel: - exc = sys.exc_info()[1] - - for name in chain(request.blueprints, (None,)): - if name in self.teardown_request_funcs: - for func in reversed(self.teardown_request_funcs[name]): - self.ensure_sync(func)(exc) - - request_tearing_down.send(self, exc=exc) - - def do_teardown_appcontext( - self, exc: t.Optional[BaseException] = _sentinel # type: ignore - ) -> None: - """Called right before the application context is popped. - - When handling a request, the application context is popped - after the request context. See :meth:`do_teardown_request`. - - This calls all functions decorated with - :meth:`teardown_appcontext`. Then the - :data:`appcontext_tearing_down` signal is sent. - - This is called by - :meth:`AppContext.pop() `. - - .. versionadded:: 0.9 - """ - if exc is _sentinel: - exc = sys.exc_info()[1] - - for func in reversed(self.teardown_appcontext_funcs): - self.ensure_sync(func)(exc) - - appcontext_tearing_down.send(self, exc=exc) - - def app_context(self) -> AppContext: - """Create an :class:`~flask.ctx.AppContext`. Use as a ``with`` - block to push the context, which will make :data:`current_app` - point at this application. - - An application context is automatically pushed by - :meth:`RequestContext.push() ` - when handling a request, and when running a CLI command. Use - this to manually create a context outside of these situations. - - :: - - with app.app_context(): - init_db() - - See :doc:`/appcontext`. - - .. versionadded:: 0.9 - """ - return AppContext(self) - - def request_context(self, environ: dict) -> RequestContext: - """Create a :class:`~flask.ctx.RequestContext` representing a - WSGI environment. Use a ``with`` block to push the context, - which will make :data:`request` point at this request. - - See :doc:`/reqcontext`. - - Typically you should not call this from your own code. A request - context is automatically pushed by the :meth:`wsgi_app` when - handling a request. Use :meth:`test_request_context` to create - an environment and context instead of this method. - - :param environ: a WSGI environment - """ - return RequestContext(self, environ) - - def test_request_context(self, *args: t.Any, **kwargs: t.Any) -> RequestContext: - """Create a :class:`~flask.ctx.RequestContext` for a WSGI - environment created from the given values. This is mostly useful - during testing, where you may want to run a function that uses - request data without dispatching a full request. - - See :doc:`/reqcontext`. - - Use a ``with`` block to push the context, which will make - :data:`request` point at the request for the created - environment. :: - - with test_request_context(...): - generate_report() - - When using the shell, it may be easier to push and pop the - context manually to avoid indentation. :: - - ctx = app.test_request_context(...) - ctx.push() - ... - ctx.pop() - - Takes the same arguments as Werkzeug's - :class:`~werkzeug.test.EnvironBuilder`, with some defaults from - the application. See the linked Werkzeug docs for most of the - available arguments. Flask-specific behavior is listed here. - - :param path: URL path being requested. - :param base_url: Base URL where the app is being served, which - ``path`` is relative to. If not given, built from - :data:`PREFERRED_URL_SCHEME`, ``subdomain``, - :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. - :param subdomain: Subdomain name to append to - :data:`SERVER_NAME`. - :param url_scheme: Scheme to use instead of - :data:`PREFERRED_URL_SCHEME`. 
- :param data: The request body, either as a string or a dict of - form keys and values. - :param json: If given, this is serialized as JSON and passed as - ``data``. Also defaults ``content_type`` to - ``application/json``. - :param args: other positional arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - :param kwargs: other keyword arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - """ - from .testing import EnvironBuilder - - builder = EnvironBuilder(self, *args, **kwargs) - - try: - return self.request_context(builder.get_environ()) - finally: - builder.close() - - def wsgi_app(self, environ: dict, start_response: t.Callable) -> t.Any: - """The actual WSGI application. This is not implemented in - :meth:`__call__` so that middlewares can be applied without - losing a reference to the app object. Instead of doing this:: - - app = MyMiddleware(app) - - It's a better idea to do this instead:: - - app.wsgi_app = MyMiddleware(app.wsgi_app) - - Then you still have the original application object around and - can continue to call methods on it. - - .. versionchanged:: 0.7 - Teardown events for the request and app contexts are called - even if an unhandled error occurs. Other events may not be - called depending on when an error occurs during dispatch. - See :ref:`callbacks-and-errors`. - - :param environ: A WSGI environment. - :param start_response: A callable accepting a status code, - a list of headers, and an optional exception context to - start the response. - """ - ctx = self.request_context(environ) - error: t.Optional[BaseException] = None - try: - try: - ctx.push() - response = self.full_dispatch_request() - except Exception as e: - error = e - response = self.handle_exception(e) - except: # noqa: B001 - error = sys.exc_info()[1] - raise - return response(environ, start_response) - finally: - if self.should_ignore_error(error): - error = None - ctx.auto_pop(error) - - def __call__(self, environ: dict, start_response: t.Callable) -> t.Any: - """The WSGI server calls the Flask application object as the - WSGI application. This calls :meth:`wsgi_app`, which can be - wrapped to apply middleware. - """ - return self.wsgi_app(environ, start_response) diff --git a/venv/lib/python3.8/site-packages/flask/blueprints.py b/venv/lib/python3.8/site-packages/flask/blueprints.py deleted file mode 100644 index 5c23a73..0000000 --- a/venv/lib/python3.8/site-packages/flask/blueprints.py +++ /dev/null @@ -1,609 +0,0 @@ -import os -import typing as t -from collections import defaultdict -from functools import update_wrapper - -from .scaffold import _endpoint_from_view_func -from .scaffold import _sentinel -from .scaffold import Scaffold -from .typing import AfterRequestCallable -from .typing import BeforeFirstRequestCallable -from .typing import BeforeRequestCallable -from .typing import TeardownCallable -from .typing import TemplateContextProcessorCallable -from .typing import TemplateFilterCallable -from .typing import TemplateGlobalCallable -from .typing import TemplateTestCallable -from .typing import URLDefaultCallable -from .typing import URLValuePreprocessorCallable - -if t.TYPE_CHECKING: - from .app import Flask - from .typing import ErrorHandlerCallable - -DeferredSetupFunction = t.Callable[["BlueprintSetupState"], t.Callable] - - -class BlueprintSetupState: - """Temporary holder object for registering a blueprint with the - application. 
An instance of this class is created by the - :meth:`~flask.Blueprint.make_setup_state` method and later passed - to all register callback functions. - """ - - def __init__( - self, - blueprint: "Blueprint", - app: "Flask", - options: t.Any, - first_registration: bool, - ) -> None: - #: a reference to the current application - self.app = app - - #: a reference to the blueprint that created this setup state. - self.blueprint = blueprint - - #: a dictionary with all options that were passed to the - #: :meth:`~flask.Flask.register_blueprint` method. - self.options = options - - #: as blueprints can be registered multiple times with the - #: application and not everything wants to be registered - #: multiple times on it, this attribute can be used to figure - #: out if the blueprint was registered in the past already. - self.first_registration = first_registration - - subdomain = self.options.get("subdomain") - if subdomain is None: - subdomain = self.blueprint.subdomain - - #: The subdomain that the blueprint should be active for, ``None`` - #: otherwise. - self.subdomain = subdomain - - url_prefix = self.options.get("url_prefix") - if url_prefix is None: - url_prefix = self.blueprint.url_prefix - #: The prefix that should be used for all URLs defined on the - #: blueprint. - self.url_prefix = url_prefix - - self.name = self.options.get("name", blueprint.name) - self.name_prefix = self.options.get("name_prefix", "") - - #: A dictionary with URL defaults that is added to each and every - #: URL that was defined with the blueprint. - self.url_defaults = dict(self.blueprint.url_values_defaults) - self.url_defaults.update(self.options.get("url_defaults", ())) - - def add_url_rule( - self, - rule: str, - endpoint: t.Optional[str] = None, - view_func: t.Optional[t.Callable] = None, - **options: t.Any, - ) -> None: - """A helper method to register a rule (and optionally a view function) - to the application. The endpoint is automatically prefixed with the - blueprint's name. - """ - if self.url_prefix is not None: - if rule: - rule = "/".join((self.url_prefix.rstrip("/"), rule.lstrip("/"))) - else: - rule = self.url_prefix - options.setdefault("subdomain", self.subdomain) - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) # type: ignore - defaults = self.url_defaults - if "defaults" in options: - defaults = dict(defaults, **options.pop("defaults")) - - self.app.add_url_rule( - rule, - f"{self.name_prefix}.{self.name}.{endpoint}".lstrip("."), - view_func, - defaults=defaults, - **options, - ) - - -class Blueprint(Scaffold): - """Represents a blueprint, a collection of routes and other - app-related functions that can be registered on a real application - later. - - A blueprint is an object that allows defining application functions - without requiring an application object ahead of time. It uses the - same decorators as :class:`~flask.Flask`, but defers the need for an - application by recording them for later registration. - - Decorating a function with a blueprint creates a deferred function - that is called with :class:`~flask.blueprints.BlueprintSetupState` - when the blueprint is registered on an application. - - See :doc:`/blueprints` for more information. - - :param name: The name of the blueprint. Will be prepended to each - endpoint name. - :param import_name: The name of the blueprint package, usually - ``__name__``. This helps locate the ``root_path`` for the - blueprint. 
- :param static_folder: A folder with static files that should be - served by the blueprint's static route. The path is relative to - the blueprint's root path. Blueprint static files are disabled - by default. - :param static_url_path: The url to serve static files from. - Defaults to ``static_folder``. If the blueprint does not have - a ``url_prefix``, the app's static route will take precedence, - and the blueprint's static files won't be accessible. - :param template_folder: A folder with templates that should be added - to the app's template search path. The path is relative to the - blueprint's root path. Blueprint templates are disabled by - default. Blueprint templates have a lower precedence than those - in the app's templates folder. - :param url_prefix: A path to prepend to all of the blueprint's URLs, - to make them distinct from the rest of the app's routes. - :param subdomain: A subdomain that blueprint routes will match on by - default. - :param url_defaults: A dict of default values that blueprint routes - will receive by default. - :param root_path: By default, the blueprint will automatically set - this based on ``import_name``. In certain situations this - automatic detection can fail, so the path can be specified - manually instead. - - .. versionchanged:: 1.1.0 - Blueprints have a ``cli`` group to register nested CLI commands. - The ``cli_group`` parameter controls the name of the group under - the ``flask`` command. - - .. versionadded:: 0.7 - """ - - warn_on_modifications = False - _got_registered_once = False - - #: Blueprint local JSON encoder class to use. Set to ``None`` to use - #: the app's :class:`~flask.Flask.json_encoder`. - json_encoder = None - #: Blueprint local JSON decoder class to use. Set to ``None`` to use - #: the app's :class:`~flask.Flask.json_decoder`. - json_decoder = None - - def __init__( - self, - name: str, - import_name: str, - static_folder: t.Optional[t.Union[str, os.PathLike]] = None, - static_url_path: t.Optional[str] = None, - template_folder: t.Optional[str] = None, - url_prefix: t.Optional[str] = None, - subdomain: t.Optional[str] = None, - url_defaults: t.Optional[dict] = None, - root_path: t.Optional[str] = None, - cli_group: t.Optional[str] = _sentinel, # type: ignore - ): - super().__init__( - import_name=import_name, - static_folder=static_folder, - static_url_path=static_url_path, - template_folder=template_folder, - root_path=root_path, - ) - - if "." in name: - raise ValueError("'name' may not contain a dot '.' character.") - - self.name = name - self.url_prefix = url_prefix - self.subdomain = subdomain - self.deferred_functions: t.List[DeferredSetupFunction] = [] - - if url_defaults is None: - url_defaults = {} - - self.url_values_defaults = url_defaults - self.cli_group = cli_group - self._blueprints: t.List[t.Tuple["Blueprint", dict]] = [] - - def _is_setup_finished(self) -> bool: - return self.warn_on_modifications and self._got_registered_once - - def record(self, func: t.Callable) -> None: - """Registers a function that is called when the blueprint is - registered on the application. This function is called with the - state as argument as returned by the :meth:`make_setup_state` - method. - """ - if self._got_registered_once and self.warn_on_modifications: - from warnings import warn - - warn( - Warning( - "The blueprint was already registered once but is" - " getting modified now. These changes will not show" - " up." 
- ) - ) - self.deferred_functions.append(func) - - def record_once(self, func: t.Callable) -> None: - """Works like :meth:`record` but wraps the function in another - function that will ensure the function is only called once. If the - blueprint is registered a second time on the application, the - function passed is not called. - """ - - def wrapper(state: BlueprintSetupState) -> None: - if state.first_registration: - func(state) - - return self.record(update_wrapper(wrapper, func)) - - def make_setup_state( - self, app: "Flask", options: dict, first_registration: bool = False - ) -> BlueprintSetupState: - """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` - object that is later passed to the register callback functions. - Subclasses can override this to return a subclass of the setup state. - """ - return BlueprintSetupState(self, app, options, first_registration) - - def register_blueprint(self, blueprint: "Blueprint", **options: t.Any) -> None: - """Register a :class:`~flask.Blueprint` on this blueprint. Keyword - arguments passed to this method will override the defaults set - on the blueprint. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - - .. versionadded:: 2.0 - """ - if blueprint is self: - raise ValueError("Cannot register a blueprint on itself") - self._blueprints.append((blueprint, options)) - - def register(self, app: "Flask", options: dict) -> None: - """Called by :meth:`Flask.register_blueprint` to register all - views and callbacks registered on the blueprint with the - application. Creates a :class:`.BlueprintSetupState` and calls - each :meth:`record` callback with it. - - :param app: The application this blueprint is being registered - with. - :param options: Keyword arguments forwarded from - :meth:`~Flask.register_blueprint`. - - .. versionchanged:: 2.0.1 - Nested blueprints are registered with their dotted name. - This allows different blueprints with the same name to be - nested at different locations. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - - .. versionchanged:: 2.0.1 - Registering the same blueprint with the same name multiple - times is deprecated and will become an error in Flask 2.1. - """ - name_prefix = options.get("name_prefix", "") - self_name = options.get("name", self.name) - name = f"{name_prefix}.{self_name}".lstrip(".") - - if name in app.blueprints: - existing_at = f" '{name}'" if self_name != name else "" - - if app.blueprints[name] is not self: - raise ValueError( - f"The name '{self_name}' is already registered for" - f" a different blueprint{existing_at}. Use 'name='" - " to provide a unique name." - ) - else: - import warnings - - warnings.warn( - f"The name '{self_name}' is already registered for" - f" this blueprint{existing_at}. Use 'name=' to" - " provide a unique name. 
This will become an error" - " in Flask 2.1.", - stacklevel=4, - ) - - first_bp_registration = not any(bp is self for bp in app.blueprints.values()) - first_name_registration = name not in app.blueprints - - app.blueprints[name] = self - self._got_registered_once = True - state = self.make_setup_state(app, options, first_bp_registration) - - if self.has_static_folder: - state.add_url_rule( - f"{self.static_url_path}/", - view_func=self.send_static_file, - endpoint="static", - ) - - # Merge blueprint data into parent. - if first_bp_registration or first_name_registration: - - def extend(bp_dict, parent_dict): - for key, values in bp_dict.items(): - key = name if key is None else f"{name}.{key}" - parent_dict[key].extend(values) - - for key, value in self.error_handler_spec.items(): - key = name if key is None else f"{name}.{key}" - value = defaultdict( - dict, - { - code: { - exc_class: func for exc_class, func in code_values.items() - } - for code, code_values in value.items() - }, - ) - app.error_handler_spec[key] = value - - for endpoint, func in self.view_functions.items(): - app.view_functions[endpoint] = func - - extend(self.before_request_funcs, app.before_request_funcs) - extend(self.after_request_funcs, app.after_request_funcs) - extend( - self.teardown_request_funcs, - app.teardown_request_funcs, - ) - extend(self.url_default_functions, app.url_default_functions) - extend(self.url_value_preprocessors, app.url_value_preprocessors) - extend(self.template_context_processors, app.template_context_processors) - - for deferred in self.deferred_functions: - deferred(state) - - cli_resolved_group = options.get("cli_group", self.cli_group) - - if self.cli.commands: - if cli_resolved_group is None: - app.cli.commands.update(self.cli.commands) - elif cli_resolved_group is _sentinel: - self.cli.name = name - app.cli.add_command(self.cli) - else: - self.cli.name = cli_resolved_group - app.cli.add_command(self.cli) - - for blueprint, bp_options in self._blueprints: - bp_options = bp_options.copy() - bp_url_prefix = bp_options.get("url_prefix") - - if bp_url_prefix is None: - bp_url_prefix = blueprint.url_prefix - - if state.url_prefix is not None and bp_url_prefix is not None: - bp_options["url_prefix"] = ( - state.url_prefix.rstrip("/") + "/" + bp_url_prefix.lstrip("/") - ) - elif bp_url_prefix is not None: - bp_options["url_prefix"] = bp_url_prefix - elif state.url_prefix is not None: - bp_options["url_prefix"] = state.url_prefix - - bp_options["name_prefix"] = name - blueprint.register(app, bp_options) - - def add_url_rule( - self, - rule: str, - endpoint: t.Optional[str] = None, - view_func: t.Optional[t.Callable] = None, - provide_automatic_options: t.Optional[bool] = None, - **options: t.Any, - ) -> None: - """Like :meth:`Flask.add_url_rule` but for a blueprint. The endpoint for - the :func:`url_for` function is prefixed with the name of the blueprint. - """ - if endpoint and "." in endpoint: - raise ValueError("'endpoint' may not contain a dot '.' character.") - - if view_func and hasattr(view_func, "__name__") and "." in view_func.__name__: - raise ValueError("'view_func' name may not contain a dot '.' character.") - - self.record( - lambda s: s.add_url_rule( - rule, - endpoint, - view_func, - provide_automatic_options=provide_automatic_options, - **options, - ) - ) - - def app_template_filter( - self, name: t.Optional[str] = None - ) -> t.Callable[[TemplateFilterCallable], TemplateFilterCallable]: - """Register a custom template filter, available application wide. 
Like - :meth:`Flask.template_filter` but for a blueprint. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def decorator(f: TemplateFilterCallable) -> TemplateFilterCallable: - self.add_app_template_filter(f, name=name) - return f - - return decorator - - def add_app_template_filter( - self, f: TemplateFilterCallable, name: t.Optional[str] = None - ) -> None: - """Register a custom template filter, available application wide. Like - :meth:`Flask.add_template_filter` but for a blueprint. Works exactly - like the :meth:`app_template_filter` decorator. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.filters[name or f.__name__] = f - - self.record_once(register_template) - - def app_template_test( - self, name: t.Optional[str] = None - ) -> t.Callable[[TemplateTestCallable], TemplateTestCallable]: - """Register a custom template test, available application wide. Like - :meth:`Flask.template_test` but for a blueprint. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def decorator(f: TemplateTestCallable) -> TemplateTestCallable: - self.add_app_template_test(f, name=name) - return f - - return decorator - - def add_app_template_test( - self, f: TemplateTestCallable, name: t.Optional[str] = None - ) -> None: - """Register a custom template test, available application wide. Like - :meth:`Flask.add_template_test` but for a blueprint. Works exactly - like the :meth:`app_template_test` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.tests[name or f.__name__] = f - - self.record_once(register_template) - - def app_template_global( - self, name: t.Optional[str] = None - ) -> t.Callable[[TemplateGlobalCallable], TemplateGlobalCallable]: - """Register a custom template global, available application wide. Like - :meth:`Flask.template_global` but for a blueprint. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. - """ - - def decorator(f: TemplateGlobalCallable) -> TemplateGlobalCallable: - self.add_app_template_global(f, name=name) - return f - - return decorator - - def add_app_template_global( - self, f: TemplateGlobalCallable, name: t.Optional[str] = None - ) -> None: - """Register a custom template global, available application wide. Like - :meth:`Flask.add_template_global` but for a blueprint. Works exactly - like the :meth:`app_template_global` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.globals[name or f.__name__] = f - - self.record_once(register_template) - - def before_app_request(self, f: BeforeRequestCallable) -> BeforeRequestCallable: - """Like :meth:`Flask.before_request`. Such a function is executed - before each request, even if outside of a blueprint. 
- """ - self.record_once( - lambda s: s.app.before_request_funcs.setdefault(None, []).append(f) - ) - return f - - def before_app_first_request( - self, f: BeforeFirstRequestCallable - ) -> BeforeFirstRequestCallable: - """Like :meth:`Flask.before_first_request`. Such a function is - executed before the first request to the application. - """ - self.record_once(lambda s: s.app.before_first_request_funcs.append(f)) - return f - - def after_app_request(self, f: AfterRequestCallable) -> AfterRequestCallable: - """Like :meth:`Flask.after_request` but for a blueprint. Such a function - is executed after each request, even if outside of the blueprint. - """ - self.record_once( - lambda s: s.app.after_request_funcs.setdefault(None, []).append(f) - ) - return f - - def teardown_app_request(self, f: TeardownCallable) -> TeardownCallable: - """Like :meth:`Flask.teardown_request` but for a blueprint. Such a - function is executed when tearing down each request, even if outside of - the blueprint. - """ - self.record_once( - lambda s: s.app.teardown_request_funcs.setdefault(None, []).append(f) - ) - return f - - def app_context_processor( - self, f: TemplateContextProcessorCallable - ) -> TemplateContextProcessorCallable: - """Like :meth:`Flask.context_processor` but for a blueprint. Such a - function is executed each request, even if outside of the blueprint. - """ - self.record_once( - lambda s: s.app.template_context_processors.setdefault(None, []).append(f) - ) - return f - - def app_errorhandler(self, code: t.Union[t.Type[Exception], int]) -> t.Callable: - """Like :meth:`Flask.errorhandler` but for a blueprint. This - handler is used for all requests, even if outside of the blueprint. - """ - - def decorator( - f: "ErrorHandlerCallable[Exception]", - ) -> "ErrorHandlerCallable[Exception]": - self.record_once(lambda s: s.app.errorhandler(code)(f)) - return f - - return decorator - - def app_url_value_preprocessor( - self, f: URLValuePreprocessorCallable - ) -> URLValuePreprocessorCallable: - """Same as :meth:`url_value_preprocessor` but application wide.""" - self.record_once( - lambda s: s.app.url_value_preprocessors.setdefault(None, []).append(f) - ) - return f - - def app_url_defaults(self, f: URLDefaultCallable) -> URLDefaultCallable: - """Same as :meth:`url_defaults` but application wide.""" - self.record_once( - lambda s: s.app.url_default_functions.setdefault(None, []).append(f) - ) - return f diff --git a/venv/lib/python3.8/site-packages/flask/cli.py b/venv/lib/python3.8/site-packages/flask/cli.py deleted file mode 100644 index 7ab4fa1..0000000 --- a/venv/lib/python3.8/site-packages/flask/cli.py +++ /dev/null @@ -1,998 +0,0 @@ -import ast -import inspect -import os -import platform -import re -import sys -import traceback -import warnings -from functools import update_wrapper -from operator import attrgetter -from threading import Lock -from threading import Thread - -import click -from werkzeug.utils import import_string - -from .globals import current_app -from .helpers import get_debug_flag -from .helpers import get_env -from .helpers import get_load_dotenv - -try: - import dotenv -except ImportError: - dotenv = None - -try: - import ssl -except ImportError: - ssl = None # type: ignore - - -class NoAppException(click.UsageError): - """Raised if an application cannot be found or loaded.""" - - -def find_best_app(script_info, module): - """Given a module instance this tries to find the best possible - application in the module or raises an exception. - """ - from . 
import Flask - - # Search for the most common names first. - for attr_name in ("app", "application"): - app = getattr(module, attr_name, None) - - if isinstance(app, Flask): - return app - - # Otherwise find the only object that is a Flask instance. - matches = [v for v in module.__dict__.values() if isinstance(v, Flask)] - - if len(matches) == 1: - return matches[0] - elif len(matches) > 1: - raise NoAppException( - "Detected multiple Flask applications in module" - f" {module.__name__!r}. Use 'FLASK_APP={module.__name__}:name'" - f" to specify the correct one." - ) - - # Search for app factory functions. - for attr_name in ("create_app", "make_app"): - app_factory = getattr(module, attr_name, None) - - if inspect.isfunction(app_factory): - try: - app = call_factory(script_info, app_factory) - - if isinstance(app, Flask): - return app - except TypeError as e: - if not _called_with_wrong_args(app_factory): - raise - - raise NoAppException( - f"Detected factory {attr_name!r} in module {module.__name__!r}," - " but could not call it without arguments. Use" - f" \"FLASK_APP='{module.__name__}:{attr_name}(args)'\"" - " to specify arguments." - ) from e - - raise NoAppException( - "Failed to find Flask application or factory in module" - f" {module.__name__!r}. Use 'FLASK_APP={module.__name__}:name'" - " to specify one." - ) - - -def call_factory(script_info, app_factory, args=None, kwargs=None): - """Takes an app factory, a ``script_info` object and optionally a tuple - of arguments. Checks for the existence of a script_info argument and calls - the app_factory depending on that and the arguments provided. - """ - sig = inspect.signature(app_factory) - args = [] if args is None else args - kwargs = {} if kwargs is None else kwargs - - if "script_info" in sig.parameters: - warnings.warn( - "The 'script_info' argument is deprecated and will not be" - " passed to the app factory function in Flask 2.1.", - DeprecationWarning, - ) - kwargs["script_info"] = script_info - - if not args and len(sig.parameters) == 1: - first_parameter = next(iter(sig.parameters.values())) - - if ( - first_parameter.default is inspect.Parameter.empty - # **kwargs is reported as an empty default, ignore it - and first_parameter.kind is not inspect.Parameter.VAR_KEYWORD - ): - warnings.warn( - "Script info is deprecated and will not be passed as the" - " single argument to the app factory function in Flask" - " 2.1.", - DeprecationWarning, - ) - args.append(script_info) - - return app_factory(*args, **kwargs) - - -def _called_with_wrong_args(f): - """Check whether calling a function raised a ``TypeError`` because - the call failed or because something in the factory raised the - error. - - :param f: The function that was called. - :return: ``True`` if the call failed. - """ - tb = sys.exc_info()[2] - - try: - while tb is not None: - if tb.tb_frame.f_code is f.__code__: - # In the function, it was called successfully. - return False - - tb = tb.tb_next - - # Didn't reach the function. - return True - finally: - # Delete tb to break a circular reference. - # https://docs.python.org/2/library/sys.html#sys.exc_info - del tb - - -def find_app_by_string(script_info, module, app_name): - """Check if the given string is a variable name or a function. Call - a function to get the app instance, or return the variable directly. - """ - from . import Flask - - # Parse app_name as a single expression to determine if it's a valid - # attribute name or function call. 
- try: - expr = ast.parse(app_name.strip(), mode="eval").body - except SyntaxError: - raise NoAppException( - f"Failed to parse {app_name!r} as an attribute name or function call." - ) from None - - if isinstance(expr, ast.Name): - name = expr.id - args = kwargs = None - elif isinstance(expr, ast.Call): - # Ensure the function name is an attribute name only. - if not isinstance(expr.func, ast.Name): - raise NoAppException( - f"Function reference must be a simple name: {app_name!r}." - ) - - name = expr.func.id - - # Parse the positional and keyword arguments as literals. - try: - args = [ast.literal_eval(arg) for arg in expr.args] - kwargs = {kw.arg: ast.literal_eval(kw.value) for kw in expr.keywords} - except ValueError: - # literal_eval gives cryptic error messages, show a generic - # message with the full expression instead. - raise NoAppException( - f"Failed to parse arguments as literal values: {app_name!r}." - ) from None - else: - raise NoAppException( - f"Failed to parse {app_name!r} as an attribute name or function call." - ) - - try: - attr = getattr(module, name) - except AttributeError as e: - raise NoAppException( - f"Failed to find attribute {name!r} in {module.__name__!r}." - ) from e - - # If the attribute is a function, call it with any args and kwargs - # to get the real application. - if inspect.isfunction(attr): - try: - app = call_factory(script_info, attr, args, kwargs) - except TypeError as e: - if not _called_with_wrong_args(attr): - raise - - raise NoAppException( - f"The factory {app_name!r} in module" - f" {module.__name__!r} could not be called with the" - " specified arguments." - ) from e - else: - app = attr - - if isinstance(app, Flask): - return app - - raise NoAppException( - "A valid Flask application was not obtained from" - f" '{module.__name__}:{app_name}'." - ) - - -def prepare_import(path): - """Given a filename this will try to calculate the python path, add it - to the search path and return the actual module name that is expected. - """ - path = os.path.realpath(path) - - fname, ext = os.path.splitext(path) - if ext == ".py": - path = fname - - if os.path.basename(path) == "__init__": - path = os.path.dirname(path) - - module_name = [] - - # move up until outside package structure (no __init__.py) - while True: - path, name = os.path.split(path) - module_name.append(name) - - if not os.path.exists(os.path.join(path, "__init__.py")): - break - - if sys.path[0] != path: - sys.path.insert(0, path) - - return ".".join(module_name[::-1]) - - -def locate_app(script_info, module_name, app_name, raise_if_not_found=True): - __traceback_hide__ = True # noqa: F841 - - try: - __import__(module_name) - except ImportError as e: - # Reraise the ImportError if it occurred within the imported module. - # Determine this by checking whether the trace has a depth > 1. - if sys.exc_info()[2].tb_next: - raise NoAppException( - f"While importing {module_name!r}, an ImportError was raised." - ) from e - elif raise_if_not_found: - raise NoAppException(f"Could not import {module_name!r}.") from e - else: - return - - module = sys.modules[module_name] - - if app_name is None: - return find_best_app(script_info, module) - else: - return find_app_by_string(script_info, module, app_name) - - -def get_version(ctx, param, value): - if not value or ctx.resilient_parsing: - return - - import werkzeug - from . 
import __version__ - - click.echo( - f"Python {platform.python_version()}\n" - f"Flask {__version__}\n" - f"Werkzeug {werkzeug.__version__}", - color=ctx.color, - ) - ctx.exit() - - -version_option = click.Option( - ["--version"], - help="Show the flask version", - expose_value=False, - callback=get_version, - is_flag=True, - is_eager=True, -) - - -class DispatchingApp: - """Special application that dispatches to a Flask application which - is imported by name in a background thread. If an error happens - it is recorded and shown as part of the WSGI handling which in case - of the Werkzeug debugger means that it shows up in the browser. - """ - - def __init__(self, loader, use_eager_loading=None): - self.loader = loader - self._app = None - self._lock = Lock() - self._bg_loading_exc = None - - if use_eager_loading is None: - use_eager_loading = os.environ.get("WERKZEUG_RUN_MAIN") != "true" - - if use_eager_loading: - self._load_unlocked() - else: - self._load_in_background() - - def _load_in_background(self): - def _load_app(): - __traceback_hide__ = True # noqa: F841 - with self._lock: - try: - self._load_unlocked() - except Exception as e: - self._bg_loading_exc = e - - t = Thread(target=_load_app, args=()) - t.start() - - def _flush_bg_loading_exception(self): - __traceback_hide__ = True # noqa: F841 - exc = self._bg_loading_exc - - if exc is not None: - self._bg_loading_exc = None - raise exc - - def _load_unlocked(self): - __traceback_hide__ = True # noqa: F841 - self._app = rv = self.loader() - self._bg_loading_exc = None - return rv - - def __call__(self, environ, start_response): - __traceback_hide__ = True # noqa: F841 - if self._app is not None: - return self._app(environ, start_response) - self._flush_bg_loading_exception() - with self._lock: - if self._app is not None: - rv = self._app - else: - rv = self._load_unlocked() - return rv(environ, start_response) - - -class ScriptInfo: - """Helper object to deal with Flask applications. This is usually not - necessary to interface with as it's used internally in the dispatching - to click. In future versions of Flask this object will most likely play - a bigger role. Typically it's created automatically by the - :class:`FlaskGroup` but you can also manually create it and pass it - onwards as click object. - """ - - def __init__(self, app_import_path=None, create_app=None, set_debug_flag=True): - #: Optionally the import path for the Flask application. - self.app_import_path = app_import_path or os.environ.get("FLASK_APP") - #: Optionally a function that is passed the script info to create - #: the instance of the application. - self.create_app = create_app - #: A dictionary with arbitrary data that can be associated with - #: this script info. - self.data = {} - self.set_debug_flag = set_debug_flag - self._loaded_app = None - - def load_app(self): - """Loads the Flask app (if not yet loaded) and returns it. Calling - this multiple times will just result in the already loaded app to - be returned. 
- """ - __traceback_hide__ = True # noqa: F841 - - if self._loaded_app is not None: - return self._loaded_app - - if self.create_app is not None: - app = call_factory(self, self.create_app) - else: - if self.app_import_path: - path, name = ( - re.split(r":(?![\\/])", self.app_import_path, 1) + [None] - )[:2] - import_name = prepare_import(path) - app = locate_app(self, import_name, name) - else: - for path in ("wsgi.py", "app.py"): - import_name = prepare_import(path) - app = locate_app(self, import_name, None, raise_if_not_found=False) - - if app: - break - - if not app: - raise NoAppException( - "Could not locate a Flask application. You did not provide " - 'the "FLASK_APP" environment variable, and a "wsgi.py" or ' - '"app.py" module was not found in the current directory.' - ) - - if self.set_debug_flag: - # Update the app's debug flag through the descriptor so that - # other values repopulate as well. - app.debug = get_debug_flag() - - self._loaded_app = app - return app - - -pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True) - - -def with_appcontext(f): - """Wraps a callback so that it's guaranteed to be executed with the - script's application context. If callbacks are registered directly - to the ``app.cli`` object then they are wrapped with this function - by default unless it's disabled. - """ - - @click.pass_context - def decorator(__ctx, *args, **kwargs): - with __ctx.ensure_object(ScriptInfo).load_app().app_context(): - return __ctx.invoke(f, *args, **kwargs) - - return update_wrapper(decorator, f) - - -class AppGroup(click.Group): - """This works similar to a regular click :class:`~click.Group` but it - changes the behavior of the :meth:`command` decorator so that it - automatically wraps the functions in :func:`with_appcontext`. - - Not to be confused with :class:`FlaskGroup`. - """ - - def command(self, *args, **kwargs): - """This works exactly like the method of the same name on a regular - :class:`click.Group` but it wraps callbacks in :func:`with_appcontext` - unless it's disabled by passing ``with_appcontext=False``. - """ - wrap_for_ctx = kwargs.pop("with_appcontext", True) - - def decorator(f): - if wrap_for_ctx: - f = with_appcontext(f) - return click.Group.command(self, *args, **kwargs)(f) - - return decorator - - def group(self, *args, **kwargs): - """This works exactly like the method of the same name on a regular - :class:`click.Group` but it defaults the group class to - :class:`AppGroup`. - """ - kwargs.setdefault("cls", AppGroup) - return click.Group.group(self, *args, **kwargs) - - -class FlaskGroup(AppGroup): - """Special subclass of the :class:`AppGroup` group that supports - loading more commands from the configured Flask app. Normally a - developer does not have to interface with this class but there are - some very advanced use cases for which it makes sense to create an - instance of this. see :ref:`custom-scripts`. - - :param add_default_commands: if this is True then the default run and - shell commands will be added. - :param add_version_option: adds the ``--version`` option. - :param create_app: an optional callback that is passed the script info and - returns the loaded app. - :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` - files to set environment variables. Will also change the working - directory to the directory containing the first file found. - :param set_debug_flag: Set the app's debug flag based on the active - environment - - .. 
versionchanged:: 1.0 - If installed, python-dotenv will be used to load environment variables - from :file:`.env` and :file:`.flaskenv` files. - """ - - def __init__( - self, - add_default_commands=True, - create_app=None, - add_version_option=True, - load_dotenv=True, - set_debug_flag=True, - **extra, - ): - params = list(extra.pop("params", None) or ()) - - if add_version_option: - params.append(version_option) - - AppGroup.__init__(self, params=params, **extra) - self.create_app = create_app - self.load_dotenv = load_dotenv - self.set_debug_flag = set_debug_flag - - if add_default_commands: - self.add_command(run_command) - self.add_command(shell_command) - self.add_command(routes_command) - - self._loaded_plugin_commands = False - - def _load_plugin_commands(self): - if self._loaded_plugin_commands: - return - try: - import pkg_resources - except ImportError: - self._loaded_plugin_commands = True - return - - for ep in pkg_resources.iter_entry_points("flask.commands"): - self.add_command(ep.load(), ep.name) - self._loaded_plugin_commands = True - - def get_command(self, ctx, name): - self._load_plugin_commands() - # Look up built-in and plugin commands, which should be - # available even if the app fails to load. - rv = super().get_command(ctx, name) - - if rv is not None: - return rv - - info = ctx.ensure_object(ScriptInfo) - - # Look up commands provided by the app, showing an error and - # continuing if the app couldn't be loaded. - try: - return info.load_app().cli.get_command(ctx, name) - except NoAppException as e: - click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") - - def list_commands(self, ctx): - self._load_plugin_commands() - # Start with the built-in and plugin commands. - rv = set(super().list_commands(ctx)) - info = ctx.ensure_object(ScriptInfo) - - # Add commands provided by the app, showing an error and - # continuing if the app couldn't be loaded. - try: - rv.update(info.load_app().cli.list_commands(ctx)) - except NoAppException as e: - # When an app couldn't be loaded, show the error message - # without the traceback. - click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") - except Exception: - # When any other errors occurred during loading, show the - # full traceback. - click.secho(f"{traceback.format_exc()}\n", err=True, fg="red") - - return sorted(rv) - - def main(self, *args, **kwargs): - # Set a global flag that indicates that we were invoked from the - # command line interface. This is detected by Flask.run to make the - # call into a no-op. This is necessary to avoid ugly errors when the - # script that is loaded here also attempts to start a server. - os.environ["FLASK_RUN_FROM_CLI"] = "true" - - if get_load_dotenv(self.load_dotenv): - load_dotenv() - - obj = kwargs.get("obj") - - if obj is None: - obj = ScriptInfo( - create_app=self.create_app, set_debug_flag=self.set_debug_flag - ) - - kwargs["obj"] = obj - kwargs.setdefault("auto_envvar_prefix", "FLASK") - return super().main(*args, **kwargs) - - -def _path_is_ancestor(path, other): - """Take ``other`` and remove the length of ``path`` from it. Then join it - to ``path``. If it is the original value, ``path`` is an ancestor of - ``other``.""" - return os.path.join(path, other[len(path) :].lstrip(os.sep)) == other - - -def load_dotenv(path=None): - """Load "dotenv" files in order of precedence to set environment variables. - - If an env var is already set it is not overwritten, so earlier files in the - list are preferred over later files. 
- - This is a no-op if `python-dotenv`_ is not installed. - - .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme - - :param path: Load the file at this location instead of searching. - :return: ``True`` if a file was loaded. - - .. versionchanged:: 1.1.0 - Returns ``False`` when python-dotenv is not installed, or when - the given path isn't a file. - - .. versionchanged:: 2.0 - When loading the env files, set the default encoding to UTF-8. - - .. versionadded:: 1.0 - """ - if dotenv is None: - if path or os.path.isfile(".env") or os.path.isfile(".flaskenv"): - click.secho( - " * Tip: There are .env or .flaskenv files present." - ' Do "pip install python-dotenv" to use them.', - fg="yellow", - err=True, - ) - - return False - - # if the given path specifies the actual file then return True, - # else False - if path is not None: - if os.path.isfile(path): - return dotenv.load_dotenv(path, encoding="utf-8") - - return False - - new_dir = None - - for name in (".env", ".flaskenv"): - path = dotenv.find_dotenv(name, usecwd=True) - - if not path: - continue - - if new_dir is None: - new_dir = os.path.dirname(path) - - dotenv.load_dotenv(path, encoding="utf-8") - - return new_dir is not None # at least one file was located and loaded - - -def show_server_banner(env, debug, app_import_path, eager_loading): - """Show extra startup messages the first time the server is run, - ignoring the reloader. - """ - if os.environ.get("WERKZEUG_RUN_MAIN") == "true": - return - - if app_import_path is not None: - message = f" * Serving Flask app {app_import_path!r}" - - if not eager_loading: - message += " (lazy loading)" - - click.echo(message) - - click.echo(f" * Environment: {env}") - - if env == "production": - click.secho( - " WARNING: This is a development server. Do not use it in" - " a production deployment.", - fg="red", - ) - click.secho(" Use a production WSGI server instead.", dim=True) - - if debug is not None: - click.echo(f" * Debug mode: {'on' if debug else 'off'}") - - -class CertParamType(click.ParamType): - """Click option type for the ``--cert`` option. Allows either an - existing file, the string ``'adhoc'``, or an import for a - :class:`~ssl.SSLContext` object. - """ - - name = "path" - - def __init__(self): - self.path_type = click.Path(exists=True, dir_okay=False, resolve_path=True) - - def convert(self, value, param, ctx): - if ssl is None: - raise click.BadParameter( - 'Using "--cert" requires Python to be compiled with SSL support.', - ctx, - param, - ) - - try: - return self.path_type(value, param, ctx) - except click.BadParameter: - value = click.STRING(value, param, ctx).lower() - - if value == "adhoc": - try: - import cryptography # noqa: F401 - except ImportError: - raise click.BadParameter( - "Using ad-hoc certificates requires the cryptography library.", - ctx, - param, - ) from None - - return value - - obj = import_string(value, silent=True) - - if isinstance(obj, ssl.SSLContext): - return obj - - raise - - -def _validate_key(ctx, param, value): - """The ``--key`` option must be specified when ``--cert`` is a file. - Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed. 
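A short sketch of the dotenv behaviour described above, assuming python-dotenv is installed (otherwise :func:`load_dotenv` is a no-op returning ``False``)::

    import os
    from flask.cli import load_dotenv

    # Existing environment variables are never overwritten, and .env is found
    # before .flaskenv, so values in .env take precedence.
    if load_dotenv():
        print("FLASK_ENV =", os.environ.get("FLASK_ENV"))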
- """ - cert = ctx.params.get("cert") - is_adhoc = cert == "adhoc" - is_context = ssl and isinstance(cert, ssl.SSLContext) - - if value is not None: - if is_adhoc: - raise click.BadParameter( - 'When "--cert" is "adhoc", "--key" is not used.', ctx, param - ) - - if is_context: - raise click.BadParameter( - 'When "--cert" is an SSLContext object, "--key is not used.', ctx, param - ) - - if not cert: - raise click.BadParameter('"--cert" must also be specified.', ctx, param) - - ctx.params["cert"] = cert, value - - else: - if cert and not (is_adhoc or is_context): - raise click.BadParameter('Required when using "--cert".', ctx, param) - - return value - - -class SeparatedPathType(click.Path): - """Click option type that accepts a list of values separated by the - OS's path separator (``:``, ``;`` on Windows). Each value is - validated as a :class:`click.Path` type. - """ - - def convert(self, value, param, ctx): - items = self.split_envvar_value(value) - super_convert = super().convert - return [super_convert(item, param, ctx) for item in items] - - -@click.command("run", short_help="Run a development server.") -@click.option("--host", "-h", default="127.0.0.1", help="The interface to bind to.") -@click.option("--port", "-p", default=5000, help="The port to bind to.") -@click.option( - "--cert", type=CertParamType(), help="Specify a certificate file to use HTTPS." -) -@click.option( - "--key", - type=click.Path(exists=True, dir_okay=False, resolve_path=True), - callback=_validate_key, - expose_value=False, - help="The key file to use when specifying a certificate.", -) -@click.option( - "--reload/--no-reload", - default=None, - help="Enable or disable the reloader. By default the reloader " - "is active if debug is enabled.", -) -@click.option( - "--debugger/--no-debugger", - default=None, - help="Enable or disable the debugger. By default the debugger " - "is active if debug is enabled.", -) -@click.option( - "--eager-loading/--lazy-loading", - default=None, - help="Enable or disable eager loading. By default eager " - "loading is enabled if the reloader is disabled.", -) -@click.option( - "--with-threads/--without-threads", - default=True, - help="Enable or disable multithreading.", -) -@click.option( - "--extra-files", - default=None, - type=SeparatedPathType(), - help=( - "Extra files that trigger a reload on change. Multiple paths" - f" are separated by {os.path.pathsep!r}." - ), -) -@pass_script_info -def run_command( - info, host, port, reload, debugger, eager_loading, with_threads, cert, extra_files -): - """Run a local development server. - - This server is for development purposes only. It does not provide - the stability, security, or performance of production WSGI servers. - - The reloader and debugger are enabled by default if - FLASK_ENV=development or FLASK_DEBUG=1. - """ - debug = get_debug_flag() - - if reload is None: - reload = debug - - if debugger is None: - debugger = debug - - show_server_banner(get_env(), debug, info.app_import_path, eager_loading) - app = DispatchingApp(info.load_app, use_eager_loading=eager_loading) - - from werkzeug.serving import run_simple - - run_simple( - host, - port, - app, - use_reloader=reload, - use_debugger=debugger, - threaded=with_threads, - ssl_context=cert, - extra_files=extra_files, - ) - - -@click.command("shell", short_help="Run a shell in the app context.") -@with_appcontext -def shell_command() -> None: - """Run an interactive Python shell in the context of a given - Flask application. 
The application will populate the default - namespace of this shell according to its configuration. - - This is useful for executing small snippets of management code - without having to manually configure the application. - """ - import code - from .globals import _app_ctx_stack - - app = _app_ctx_stack.top.app - banner = ( - f"Python {sys.version} on {sys.platform}\n" - f"App: {app.import_name} [{app.env}]\n" - f"Instance: {app.instance_path}" - ) - ctx: dict = {} - - # Support the regular Python interpreter startup script if someone - # is using it. - startup = os.environ.get("PYTHONSTARTUP") - if startup and os.path.isfile(startup): - with open(startup) as f: - eval(compile(f.read(), startup, "exec"), ctx) - - ctx.update(app.make_shell_context()) - - # Site, customize, or startup script can set a hook to call when - # entering interactive mode. The default one sets up readline with - # tab and history completion. - interactive_hook = getattr(sys, "__interactivehook__", None) - - if interactive_hook is not None: - try: - import readline - from rlcompleter import Completer - except ImportError: - pass - else: - # rlcompleter uses __main__.__dict__ by default, which is - # flask.__main__. Use the shell context instead. - readline.set_completer(Completer(ctx).complete) - - interactive_hook() - - code.interact(banner=banner, local=ctx) - - -@click.command("routes", short_help="Show the routes for the app.") -@click.option( - "--sort", - "-s", - type=click.Choice(("endpoint", "methods", "rule", "match")), - default="endpoint", - help=( - 'Method to sort routes by. "match" is the order that Flask will match ' - "routes when dispatching a request." - ), -) -@click.option("--all-methods", is_flag=True, help="Show HEAD and OPTIONS methods.") -@with_appcontext -def routes_command(sort: str, all_methods: bool) -> None: - """Show all registered routes with endpoints and methods.""" - - rules = list(current_app.url_map.iter_rules()) - if not rules: - click.echo("No routes were registered.") - return - - ignored_methods = set(() if all_methods else ("HEAD", "OPTIONS")) - - if sort in ("endpoint", "rule"): - rules = sorted(rules, key=attrgetter(sort)) - elif sort == "methods": - rules = sorted(rules, key=lambda rule: sorted(rule.methods)) # type: ignore - - rule_methods = [ - ", ".join(sorted(rule.methods - ignored_methods)) # type: ignore - for rule in rules - ] - - headers = ("Endpoint", "Methods", "Rule") - widths = ( - max(len(rule.endpoint) for rule in rules), - max(len(methods) for methods in rule_methods), - max(len(rule.rule) for rule in rules), - ) - widths = [max(len(h), w) for h, w in zip(headers, widths)] - row = "{{0:<{0}}} {{1:<{1}}} {{2:<{2}}}".format(*widths) - - click.echo(row.format(*headers).strip()) - click.echo(row.format(*("-" * width for width in widths))) - - for rule, methods in zip(rules, rule_methods): - click.echo(row.format(rule.endpoint, methods, rule.rule).rstrip()) - - -cli = FlaskGroup( - help="""\ -A general utility script for Flask applications. - -Provides commands from Flask, extensions, and the application. Loads the -application defined in the FLASK_APP environment variable, or from a wsgi.py -file. Setting the FLASK_ENV environment variable to 'development' will enable -debug mode. 
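What ``flask routes`` prints can be reproduced from ``app.url_map``; a minimal sketch mirroring the endpoint sorting and the HEAD/OPTIONS filtering above::

    from operator import attrgetter
    from flask import Flask

    app = Flask(__name__)

    @app.route("/hello/<name>")
    def hello(name):
        return f"Hello {name}"

    for rule in sorted(app.url_map.iter_rules(), key=attrgetter("endpoint")):
        methods = ", ".join(sorted(rule.methods - {"HEAD", "OPTIONS"}))
        print(f"{rule.endpoint:20} {methods:20} {rule.rule}")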
- -\b - {prefix}{cmd} FLASK_APP=hello.py - {prefix}{cmd} FLASK_ENV=development - {prefix}flask run -""".format( - cmd="export" if os.name == "posix" else "set", - prefix="$ " if os.name == "posix" else "> ", - ) -) - - -def main() -> None: - if int(click.__version__[0]) < 8: - warnings.warn( - "Using the `flask` cli with Click 7 is deprecated and" - " will not be supported starting with Flask 2.1." - " Please upgrade to Click 8 as soon as possible.", - DeprecationWarning, - ) - # TODO omit sys.argv once https://github.com/pallets/click/issues/536 is fixed - cli.main(args=sys.argv[1:]) - - -if __name__ == "__main__": - main() diff --git a/venv/lib/python3.8/site-packages/flask/config.py b/venv/lib/python3.8/site-packages/flask/config.py deleted file mode 100644 index ca76902..0000000 --- a/venv/lib/python3.8/site-packages/flask/config.py +++ /dev/null @@ -1,295 +0,0 @@ -import errno -import os -import types -import typing as t - -from werkzeug.utils import import_string - - -class ConfigAttribute: - """Makes an attribute forward to the config""" - - def __init__(self, name: str, get_converter: t.Optional[t.Callable] = None) -> None: - self.__name__ = name - self.get_converter = get_converter - - def __get__(self, obj: t.Any, owner: t.Any = None) -> t.Any: - if obj is None: - return self - rv = obj.config[self.__name__] - if self.get_converter is not None: - rv = self.get_converter(rv) - return rv - - def __set__(self, obj: t.Any, value: t.Any) -> None: - obj.config[self.__name__] = value - - -class Config(dict): - """Works exactly like a dict but provides ways to fill it from files - or special dictionaries. There are two common patterns to populate the - config. - - Either you can fill the config from a config file:: - - app.config.from_pyfile('yourconfig.cfg') - - Or alternatively you can define the configuration options in the - module that calls :meth:`from_object` or provide an import path to - a module that should be loaded. It is also possible to tell it to - use the same module and with that provide the configuration values - just before the call:: - - DEBUG = True - SECRET_KEY = 'development key' - app.config.from_object(__name__) - - In both cases (loading from any Python file or loading from modules), - only uppercase keys are added to the config. This makes it possible to use - lowercase values in the config file for temporary values that are not added - to the config or to define the config keys in the same file that implements - the application. - - Probably the most interesting way to load configurations is from an - environment variable pointing to a file:: - - app.config.from_envvar('YOURAPPLICATION_SETTINGS') - - In this case before launching the application you have to set this - environment variable to the file you want to use. On Linux and OS X - use the export statement:: - - export YOURAPPLICATION_SETTINGS='/path/to/config/file' - - On windows use `set` instead. - - :param root_path: path to which files are read relative from. When the - config object is created by the application, this is - the application's :attr:`~flask.Flask.root_path`. - :param defaults: an optional dictionary of default values - """ - - def __init__(self, root_path: str, defaults: t.Optional[dict] = None) -> None: - dict.__init__(self, defaults or {}) - self.root_path = root_path - - def from_envvar(self, variable_name: str, silent: bool = False) -> bool: - """Loads a configuration from an environment variable pointing to - a configuration file. 
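A minimal sketch of the two loading patterns described in the :class:`Config` docstring, with ``MYAPP_SETTINGS`` as an assumed environment variable name::

    from flask import Flask

    class DefaultConfig:
        DEBUG = False
        SECRET_KEY = "dev"        # placeholder; override in the real settings file
        internal = "ignored"      # lowercase attributes are skipped by from_object

    app = Flask(__name__)
    app.config.from_object(DefaultConfig)
    app.config.from_envvar("MYAPP_SETTINGS", silent=True)   # optional override file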
This is basically just a shortcut with nicer - error messages for this line of code:: - - app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) - - :param variable_name: name of the environment variable - :param silent: set to ``True`` if you want silent failure for missing - files. - :return: ``True`` if the file was loaded successfully. - """ - rv = os.environ.get(variable_name) - if not rv: - if silent: - return False - raise RuntimeError( - f"The environment variable {variable_name!r} is not set" - " and as such configuration could not be loaded. Set" - " this variable and make it point to a configuration" - " file" - ) - return self.from_pyfile(rv, silent=silent) - - def from_pyfile(self, filename: str, silent: bool = False) -> bool: - """Updates the values in the config from a Python file. This function - behaves as if the file was imported as module with the - :meth:`from_object` function. - - :param filename: the filename of the config. This can either be an - absolute filename or a filename relative to the - root path. - :param silent: set to ``True`` if you want silent failure for missing - files. - :return: ``True`` if the file was loaded successfully. - - .. versionadded:: 0.7 - `silent` parameter. - """ - filename = os.path.join(self.root_path, filename) - d = types.ModuleType("config") - d.__file__ = filename - try: - with open(filename, mode="rb") as config_file: - exec(compile(config_file.read(), filename, "exec"), d.__dict__) - except OSError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR): - return False - e.strerror = f"Unable to load configuration file ({e.strerror})" - raise - self.from_object(d) - return True - - def from_object(self, obj: t.Union[object, str]) -> None: - """Updates the values from the given object. An object can be of one - of the following two types: - - - a string: in this case the object with that name will be imported - - an actual object reference: that object is used directly - - Objects are usually either modules or classes. :meth:`from_object` - loads only the uppercase attributes of the module/class. A ``dict`` - object will not work with :meth:`from_object` because the keys of a - ``dict`` are not attributes of the ``dict`` class. - - Example of module-based configuration:: - - app.config.from_object('yourapplication.default_config') - from yourapplication import default_config - app.config.from_object(default_config) - - Nothing is done to the object before loading. If the object is a - class and has ``@property`` attributes, it needs to be - instantiated before being passed to this method. - - You should not use this function to load the actual configuration but - rather configuration defaults. The actual config should be loaded - with :meth:`from_pyfile` and ideally from a location not within the - package because the package might be installed system wide. - - See :ref:`config-dev-prod` for an example of class-based configuration - using :meth:`from_object`. - - :param obj: an import name or object - """ - if isinstance(obj, str): - obj = import_string(obj) - for key in dir(obj): - if key.isupper(): - self[key] = getattr(obj, key) - - def from_file( - self, - filename: str, - load: t.Callable[[t.IO[t.Any]], t.Mapping], - silent: bool = False, - ) -> bool: - """Update the values in the config from a file that is loaded - using the ``load`` parameter. The loaded data is passed to the - :meth:`from_mapping` method. - - .. 
code-block:: python - - import toml - app.config.from_file("config.toml", load=toml.load) - - :param filename: The path to the data file. This can be an - absolute path or relative to the config root path. - :param load: A callable that takes a file handle and returns a - mapping of loaded data from the file. - :type load: ``Callable[[Reader], Mapping]`` where ``Reader`` - implements a ``read`` method. - :param silent: Ignore the file if it doesn't exist. - :return: ``True`` if the file was loaded successfully. - - .. versionadded:: 2.0 - """ - filename = os.path.join(self.root_path, filename) - - try: - with open(filename) as f: - obj = load(f) - except OSError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR): - return False - - e.strerror = f"Unable to load configuration file ({e.strerror})" - raise - - return self.from_mapping(obj) - - def from_json(self, filename: str, silent: bool = False) -> bool: - """Update the values in the config from a JSON file. The loaded - data is passed to the :meth:`from_mapping` method. - - :param filename: The path to the JSON file. This can be an - absolute path or relative to the config root path. - :param silent: Ignore the file if it doesn't exist. - :return: ``True`` if the file was loaded successfully. - - .. deprecated:: 2.0.0 - Will be removed in Flask 2.1. Use :meth:`from_file` instead. - This was removed early in 2.0.0, was added back in 2.0.1. - - .. versionadded:: 0.11 - """ - import warnings - from . import json - - warnings.warn( - "'from_json' is deprecated and will be removed in Flask" - " 2.1. Use 'from_file(path, json.load)' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.from_file(filename, json.load, silent=silent) - - def from_mapping( - self, mapping: t.Optional[t.Mapping[str, t.Any]] = None, **kwargs: t.Any - ) -> bool: - """Updates the config like :meth:`update` ignoring items with non-upper - keys. - :return: Always returns ``True``. - - .. versionadded:: 0.11 - """ - mappings: t.Dict[str, t.Any] = {} - if mapping is not None: - mappings.update(mapping) - mappings.update(kwargs) - for key, value in mappings.items(): - if key.isupper(): - self[key] = value - return True - - def get_namespace( - self, namespace: str, lowercase: bool = True, trim_namespace: bool = True - ) -> t.Dict[str, t.Any]: - """Returns a dictionary containing a subset of configuration options - that match the specified namespace/prefix. Example usage:: - - app.config['IMAGE_STORE_TYPE'] = 'fs' - app.config['IMAGE_STORE_PATH'] = '/var/app/images' - app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com' - image_store_config = app.config.get_namespace('IMAGE_STORE_') - - The resulting dictionary `image_store_config` would look like:: - - { - 'type': 'fs', - 'path': '/var/app/images', - 'base_url': 'http://img.website.com' - } - - This is often useful when configuration options map directly to - keyword arguments in functions or class constructors. - - :param namespace: a configuration namespace - :param lowercase: a flag indicating if the keys of the resulting - dictionary should be lowercase - :param trim_namespace: a flag indicating if the keys of the resulting - dictionary should not include the namespace - - .. 
versionadded:: 0.11 - """ - rv = {} - for k, v in self.items(): - if not k.startswith(namespace): - continue - if trim_namespace: - key = k[len(namespace) :] - else: - key = k - if lowercase: - key = key.lower() - rv[key] = v - return rv - - def __repr__(self) -> str: - return f"<{type(self).__name__} {dict.__repr__(self)}>" diff --git a/venv/lib/python3.8/site-packages/flask/ctx.py b/venv/lib/python3.8/site-packages/flask/ctx.py deleted file mode 100644 index 5c06463..0000000 --- a/venv/lib/python3.8/site-packages/flask/ctx.py +++ /dev/null @@ -1,480 +0,0 @@ -import sys -import typing as t -from functools import update_wrapper -from types import TracebackType - -from werkzeug.exceptions import HTTPException - -from .globals import _app_ctx_stack -from .globals import _request_ctx_stack -from .signals import appcontext_popped -from .signals import appcontext_pushed -from .typing import AfterRequestCallable - -if t.TYPE_CHECKING: - from .app import Flask - from .sessions import SessionMixin - from .wrappers import Request - - -# a singleton sentinel value for parameter defaults -_sentinel = object() - - -class _AppCtxGlobals: - """A plain object. Used as a namespace for storing data during an - application context. - - Creating an app context automatically creates this object, which is - made available as the :data:`g` proxy. - - .. describe:: 'key' in g - - Check whether an attribute is present. - - .. versionadded:: 0.10 - - .. describe:: iter(g) - - Return an iterator over the attribute names. - - .. versionadded:: 0.10 - """ - - # Define attr methods to let mypy know this is a namespace object - # that has arbitrary attributes. - - def __getattr__(self, name: str) -> t.Any: - try: - return self.__dict__[name] - except KeyError: - raise AttributeError(name) from None - - def __setattr__(self, name: str, value: t.Any) -> None: - self.__dict__[name] = value - - def __delattr__(self, name: str) -> None: - try: - del self.__dict__[name] - except KeyError: - raise AttributeError(name) from None - - def get(self, name: str, default: t.Optional[t.Any] = None) -> t.Any: - """Get an attribute by name, or a default value. Like - :meth:`dict.get`. - - :param name: Name of attribute to get. - :param default: Value to return if the attribute is not present. - - .. versionadded:: 0.10 - """ - return self.__dict__.get(name, default) - - def pop(self, name: str, default: t.Any = _sentinel) -> t.Any: - """Get and remove an attribute by name. Like :meth:`dict.pop`. - - :param name: Name of attribute to pop. - :param default: Value to return if the attribute is not present, - instead of raising a ``KeyError``. - - .. versionadded:: 0.11 - """ - if default is _sentinel: - return self.__dict__.pop(name) - else: - return self.__dict__.pop(name, default) - - def setdefault(self, name: str, default: t.Any = None) -> t.Any: - """Get the value of an attribute if it is present, otherwise - set and return a default value. Like :meth:`dict.setdefault`. - - :param name: Name of attribute to get. - :param default: Value to set and return if the attribute is not - present. - - .. 
versionadded:: 0.11 - """ - return self.__dict__.setdefault(name, default) - - def __contains__(self, item: str) -> bool: - return item in self.__dict__ - - def __iter__(self) -> t.Iterator[str]: - return iter(self.__dict__) - - def __repr__(self) -> str: - top = _app_ctx_stack.top - if top is not None: - return f"" - return object.__repr__(self) - - -def after_this_request(f: AfterRequestCallable) -> AfterRequestCallable: - """Executes a function after this request. This is useful to modify - response objects. The function is passed the response object and has - to return the same or a new one. - - Example:: - - @app.route('/') - def index(): - @after_this_request - def add_header(response): - response.headers['X-Foo'] = 'Parachute' - return response - return 'Hello World!' - - This is more useful if a function other than the view function wants to - modify a response. For instance think of a decorator that wants to add - some headers without converting the return value into a response object. - - .. versionadded:: 0.9 - """ - _request_ctx_stack.top._after_request_functions.append(f) - return f - - -def copy_current_request_context(f: t.Callable) -> t.Callable: - """A helper function that decorates a function to retain the current - request context. This is useful when working with greenlets. The moment - the function is decorated a copy of the request context is created and - then pushed when the function is called. The current session is also - included in the copied request context. - - Example:: - - import gevent - from flask import copy_current_request_context - - @app.route('/') - def index(): - @copy_current_request_context - def do_some_work(): - # do some work here, it can access flask.request or - # flask.session like you would otherwise in the view function. - ... - gevent.spawn(do_some_work) - return 'Regular response' - - .. versionadded:: 0.10 - """ - top = _request_ctx_stack.top - if top is None: - raise RuntimeError( - "This decorator can only be used at local scopes " - "when a request context is on the stack. For instance within " - "view functions." - ) - reqctx = top.copy() - - def wrapper(*args, **kwargs): - with reqctx: - return f(*args, **kwargs) - - return update_wrapper(wrapper, f) - - -def has_request_context() -> bool: - """If you have code that wants to test if a request context is there or - not this function can be used. For instance, you may want to take advantage - of request information if the request object is available, but fail - silently if it is unavailable. - - :: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and has_request_context(): - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - Alternatively you can also just test any of the context bound objects - (such as :class:`request` or :class:`g`) for truthness:: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and request: - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - .. versionadded:: 0.7 - """ - return _request_ctx_stack.top is not None - - -def has_app_context() -> bool: - """Works like :func:`has_request_context` but for the application - context. You can also just do a boolean check on the - :data:`current_app` object instead. - - .. 
versionadded:: 0.9 - """ - return _app_ctx_stack.top is not None - - -class AppContext: - """The application context binds an application object implicitly - to the current thread or greenlet, similar to how the - :class:`RequestContext` binds request information. The application - context is also implicitly created if a request context is created - but the application is not on top of the individual application - context. - """ - - def __init__(self, app: "Flask") -> None: - self.app = app - self.url_adapter = app.create_url_adapter(None) - self.g = app.app_ctx_globals_class() - - # Like request context, app contexts can be pushed multiple times - # but there a basic "refcount" is enough to track them. - self._refcnt = 0 - - def push(self) -> None: - """Binds the app context to the current context.""" - self._refcnt += 1 - _app_ctx_stack.push(self) - appcontext_pushed.send(self.app) - - def pop(self, exc: t.Optional[BaseException] = _sentinel) -> None: # type: ignore - """Pops the app context.""" - try: - self._refcnt -= 1 - if self._refcnt <= 0: - if exc is _sentinel: - exc = sys.exc_info()[1] - self.app.do_teardown_appcontext(exc) - finally: - rv = _app_ctx_stack.pop() - assert rv is self, f"Popped wrong app context. ({rv!r} instead of {self!r})" - appcontext_popped.send(self.app) - - def __enter__(self) -> "AppContext": - self.push() - return self - - def __exit__( - self, exc_type: type, exc_value: BaseException, tb: TracebackType - ) -> None: - self.pop(exc_value) - - -class RequestContext: - """The request context contains all request relevant information. It is - created at the beginning of the request and pushed to the - `_request_ctx_stack` and removed at the end of it. It will create the - URL adapter and request object for the WSGI environment provided. - - Do not attempt to use this class directly, instead use - :meth:`~flask.Flask.test_request_context` and - :meth:`~flask.Flask.request_context` to create this object. - - When the request context is popped, it will evaluate all the - functions registered on the application for teardown execution - (:meth:`~flask.Flask.teardown_request`). - - The request context is automatically popped at the end of the request - for you. In debug mode the request context is kept around if - exceptions happen so that interactive debuggers have a chance to - introspect the data. With 0.4 this can also be forced for requests - that did not fail and outside of ``DEBUG`` mode. By setting - ``'flask._preserve_context'`` to ``True`` on the WSGI environment the - context will not pop itself at the end of the request. This is used by - the :meth:`~flask.Flask.test_client` for example to implement the - deferred cleanup functionality. - - You might find this helpful for unittests where you need the - information from the context local around for a little longer. Make - sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in - that situation, otherwise your unittests will leak memory. 
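Pushing an application context by hand is enough to make the ``current_app`` and ``g`` proxies resolve outside a request; a minimal sketch::

    from flask import Flask, current_app, g

    app = Flask(__name__)

    with app.app_context():
        g.job_id = 42                        # stored on _AppCtxGlobals
        print(current_app.name, g.get("job_id"))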
- """ - - def __init__( - self, - app: "Flask", - environ: dict, - request: t.Optional["Request"] = None, - session: t.Optional["SessionMixin"] = None, - ) -> None: - self.app = app - if request is None: - request = app.request_class(environ) - self.request = request - self.url_adapter = None - try: - self.url_adapter = app.create_url_adapter(self.request) - except HTTPException as e: - self.request.routing_exception = e - self.flashes = None - self.session = session - - # Request contexts can be pushed multiple times and interleaved with - # other request contexts. Now only if the last level is popped we - # get rid of them. Additionally if an application context is missing - # one is created implicitly so for each level we add this information - self._implicit_app_ctx_stack: t.List[t.Optional["AppContext"]] = [] - - # indicator if the context was preserved. Next time another context - # is pushed the preserved context is popped. - self.preserved = False - - # remembers the exception for pop if there is one in case the context - # preservation kicks in. - self._preserved_exc = None - - # Functions that should be executed after the request on the response - # object. These will be called before the regular "after_request" - # functions. - self._after_request_functions: t.List[AfterRequestCallable] = [] - - @property - def g(self) -> AppContext: - return _app_ctx_stack.top.g - - @g.setter - def g(self, value: AppContext) -> None: - _app_ctx_stack.top.g = value - - def copy(self) -> "RequestContext": - """Creates a copy of this request context with the same request object. - This can be used to move a request context to a different greenlet. - Because the actual request object is the same this cannot be used to - move a request context to a different thread unless access to the - request object is locked. - - .. versionadded:: 0.10 - - .. versionchanged:: 1.1 - The current session object is used instead of reloading the original - data. This prevents `flask.session` pointing to an out-of-date object. - """ - return self.__class__( - self.app, - environ=self.request.environ, - request=self.request, - session=self.session, - ) - - def match_request(self) -> None: - """Can be overridden by a subclass to hook into the matching - of the request. - """ - try: - result = self.url_adapter.match(return_rule=True) # type: ignore - self.request.url_rule, self.request.view_args = result # type: ignore - except HTTPException as e: - self.request.routing_exception = e - - def push(self) -> None: - """Binds the request context to the current context.""" - # If an exception occurs in debug mode or if context preservation is - # activated under exception situations exactly one context stays - # on the stack. The rationale is that you want to access that - # information under debug situations. However if someone forgets to - # pop that context again we want to make sure that on the next push - # it's invalidated, otherwise we run at risk that something leaks - # memory. This is usually only a problem in test suite since this - # functionality is not active in production environments. - top = _request_ctx_stack.top - if top is not None and top.preserved: - top.pop(top._preserved_exc) - - # Before we push the request context we have to ensure that there - # is an application context. 
- app_ctx = _app_ctx_stack.top - if app_ctx is None or app_ctx.app != self.app: - app_ctx = self.app.app_context() - app_ctx.push() - self._implicit_app_ctx_stack.append(app_ctx) - else: - self._implicit_app_ctx_stack.append(None) - - _request_ctx_stack.push(self) - - # Open the session at the moment that the request context is available. - # This allows a custom open_session method to use the request context. - # Only open a new session if this is the first time the request was - # pushed, otherwise stream_with_context loses the session. - if self.session is None: - session_interface = self.app.session_interface - self.session = session_interface.open_session(self.app, self.request) - - if self.session is None: - self.session = session_interface.make_null_session(self.app) - - # Match the request URL after loading the session, so that the - # session is available in custom URL converters. - if self.url_adapter is not None: - self.match_request() - - def pop(self, exc: t.Optional[BaseException] = _sentinel) -> None: # type: ignore - """Pops the request context and unbinds it by doing that. This will - also trigger the execution of functions registered by the - :meth:`~flask.Flask.teardown_request` decorator. - - .. versionchanged:: 0.9 - Added the `exc` argument. - """ - app_ctx = self._implicit_app_ctx_stack.pop() - clear_request = False - - try: - if not self._implicit_app_ctx_stack: - self.preserved = False - self._preserved_exc = None - if exc is _sentinel: - exc = sys.exc_info()[1] - self.app.do_teardown_request(exc) - - request_close = getattr(self.request, "close", None) - if request_close is not None: - request_close() - clear_request = True - finally: - rv = _request_ctx_stack.pop() - - # get rid of circular dependencies at the end of the request - # so that we don't require the GC to be active. - if clear_request: - rv.request.environ["werkzeug.request"] = None - - # Get rid of the app as well if necessary. - if app_ctx is not None: - app_ctx.pop(exc) - - assert ( - rv is self - ), f"Popped wrong request context. ({rv!r} instead of {self!r})" - - def auto_pop(self, exc: t.Optional[BaseException]) -> None: - if self.request.environ.get("flask._preserve_context") or ( - exc is not None and self.app.preserve_context_on_exception - ): - self.preserved = True - self._preserved_exc = exc # type: ignore - else: - self.pop(exc) - - def __enter__(self) -> "RequestContext": - self.push() - return self - - def __exit__( - self, exc_type: type, exc_value: BaseException, tb: TracebackType - ) -> None: - # do not pop the request stack if we are in debug mode and an - # exception happened. This will allow the debugger to still - # access the request object in the interactive shell. Furthermore - # the context can be force kept alive for the test client. - # See flask.testing for how this works. 
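Popping the request context runs the registered teardown callbacks; a minimal sketch::

    from flask import Flask

    app = Flask(__name__)

    @app.teardown_request
    def release_resources(exc):
        # Runs when the RequestContext is popped, even if the view raised.
        print("teardown, exception:", exc)

    with app.test_request_context("/"):
        pass   # leaving the block pops the context and calls release_resources(None)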
- self.auto_pop(exc_value) - - def __repr__(self) -> str: - return ( - f"<{type(self).__name__} {self.request.url!r}" - f" [{self.request.method}] of {self.app.name}>" - ) diff --git a/venv/lib/python3.8/site-packages/flask/debughelpers.py b/venv/lib/python3.8/site-packages/flask/debughelpers.py deleted file mode 100644 index 212f7d7..0000000 --- a/venv/lib/python3.8/site-packages/flask/debughelpers.py +++ /dev/null @@ -1,172 +0,0 @@ -import os -import typing as t -from warnings import warn - -from .app import Flask -from .blueprints import Blueprint -from .globals import _request_ctx_stack - - -class UnexpectedUnicodeError(AssertionError, UnicodeError): - """Raised in places where we want some better error reporting for - unexpected unicode or binary data. - """ - - -class DebugFilesKeyError(KeyError, AssertionError): - """Raised from request.files during debugging. The idea is that it can - provide a better error message than just a generic KeyError/BadRequest. - """ - - def __init__(self, request, key): - form_matches = request.form.getlist(key) - buf = [ - f"You tried to access the file {key!r} in the request.files" - " dictionary but it does not exist. The mimetype for the" - f" request is {request.mimetype!r} instead of" - " 'multipart/form-data' which means that no file contents" - " were transmitted. To fix this error you should provide" - ' enctype="multipart/form-data" in your form.' - ] - if form_matches: - names = ", ".join(repr(x) for x in form_matches) - buf.append( - "\n\nThe browser instead transmitted some file names. " - f"This was submitted: {names}" - ) - self.msg = "".join(buf) - - def __str__(self): - return self.msg - - -class FormDataRoutingRedirect(AssertionError): - """This exception is raised by Flask in debug mode if it detects a - redirect caused by the routing system when the request method is not - GET, HEAD or OPTIONS. Reasoning: form data will be dropped. - """ - - def __init__(self, request): - exc = request.routing_exception - buf = [ - f"A request was sent to this URL ({request.url}) but a" - " redirect was issued automatically by the routing system" - f" to {exc.new_url!r}." - ] - - # In case just a slash was appended we can be extra helpful - if f"{request.base_url}/" == exc.new_url.split("?")[0]: - buf.append( - " The URL was defined with a trailing slash so Flask" - " will automatically redirect to the URL with the" - " trailing slash if it was accessed without one." - ) - - buf.append( - " Make sure to directly send your" - f" {request.method}-request to this URL since we can't make" - " browsers or HTTP clients redirect with form data reliably" - " or without user interaction." - ) - buf.append("\n\nNote: this exception is only raised in debug mode") - AssertionError.__init__(self, "".join(buf).encode("utf-8")) - - -def attach_enctype_error_multidict(request): - """Since Flask 0.8 we're monkeypatching the files object in case a - request is detected that does not use multipart form data but the files - object is accessed. 
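The improved error only appears when ``request.files`` is actually used; a minimal upload view, with ``"document"`` as an assumed field name::

    from flask import Flask, request

    app = Flask(__name__)

    @app.route("/upload", methods=["POST"])
    def upload():
        # The form must be posted with enctype="multipart/form-data"; in debug
        # mode a missing key raises the more descriptive DebugFilesKeyError.
        uploaded = request.files["document"]
        return uploaded.filename or "unnamed"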
- """ - oldcls = request.files.__class__ - - class newcls(oldcls): - def __getitem__(self, key): - try: - return oldcls.__getitem__(self, key) - except KeyError as e: - if key not in request.form: - raise - - raise DebugFilesKeyError(request, key) from e - - newcls.__name__ = oldcls.__name__ - newcls.__module__ = oldcls.__module__ - request.files.__class__ = newcls - - -def _dump_loader_info(loader) -> t.Generator: - yield f"class: {type(loader).__module__}.{type(loader).__name__}" - for key, value in sorted(loader.__dict__.items()): - if key.startswith("_"): - continue - if isinstance(value, (tuple, list)): - if not all(isinstance(x, str) for x in value): - continue - yield f"{key}:" - for item in value: - yield f" - {item}" - continue - elif not isinstance(value, (str, int, float, bool)): - continue - yield f"{key}: {value!r}" - - -def explain_template_loading_attempts(app: Flask, template, attempts) -> None: - """This should help developers understand what failed""" - info = [f"Locating template {template!r}:"] - total_found = 0 - blueprint = None - reqctx = _request_ctx_stack.top - if reqctx is not None and reqctx.request.blueprint is not None: - blueprint = reqctx.request.blueprint - - for idx, (loader, srcobj, triple) in enumerate(attempts): - if isinstance(srcobj, Flask): - src_info = f"application {srcobj.import_name!r}" - elif isinstance(srcobj, Blueprint): - src_info = f"blueprint {srcobj.name!r} ({srcobj.import_name})" - else: - src_info = repr(srcobj) - - info.append(f"{idx + 1:5}: trying loader of {src_info}") - - for line in _dump_loader_info(loader): - info.append(f" {line}") - - if triple is None: - detail = "no match" - else: - detail = f"found ({triple[1] or ''!r})" - total_found += 1 - info.append(f" -> {detail}") - - seems_fishy = False - if total_found == 0: - info.append("Error: the template could not be found.") - seems_fishy = True - elif total_found > 1: - info.append("Warning: multiple loaders returned a match for the template.") - seems_fishy = True - - if blueprint is not None and seems_fishy: - info.append( - " The template was looked up from an endpoint that belongs" - f" to the blueprint {blueprint!r}." - ) - info.append(" Maybe you did not place a template in the right folder?") - info.append(" See https://flask.palletsprojects.com/blueprints/#templates") - - app.logger.info("\n".join(info)) - - -def explain_ignored_app_run() -> None: - if os.environ.get("WERKZEUG_RUN_MAIN") != "true": - warn( - Warning( - "Silently ignoring app.run() because the application is" - " run from the flask command line executable. Consider" - ' putting app.run() behind an if __name__ == "__main__"' - " guard to silence this warning." - ), - stacklevel=3, - ) diff --git a/venv/lib/python3.8/site-packages/flask/globals.py b/venv/lib/python3.8/site-packages/flask/globals.py deleted file mode 100644 index 6d91c75..0000000 --- a/venv/lib/python3.8/site-packages/flask/globals.py +++ /dev/null @@ -1,59 +0,0 @@ -import typing as t -from functools import partial - -from werkzeug.local import LocalProxy -from werkzeug.local import LocalStack - -if t.TYPE_CHECKING: - from .app import Flask - from .ctx import _AppCtxGlobals - from .sessions import SessionMixin - from .wrappers import Request - -_request_ctx_err_msg = """\ -Working outside of request context. - -This typically means that you attempted to use functionality that needed -an active HTTP request. 
Consult the documentation on testing for -information about how to avoid this problem.\ -""" -_app_ctx_err_msg = """\ -Working outside of application context. - -This typically means that you attempted to use functionality that needed -to interface with the current application object in some way. To solve -this, set up an application context with app.app_context(). See the -documentation for more information.\ -""" - - -def _lookup_req_object(name): - top = _request_ctx_stack.top - if top is None: - raise RuntimeError(_request_ctx_err_msg) - return getattr(top, name) - - -def _lookup_app_object(name): - top = _app_ctx_stack.top - if top is None: - raise RuntimeError(_app_ctx_err_msg) - return getattr(top, name) - - -def _find_app(): - top = _app_ctx_stack.top - if top is None: - raise RuntimeError(_app_ctx_err_msg) - return top.app - - -# context locals -_request_ctx_stack = LocalStack() -_app_ctx_stack = LocalStack() -current_app: "Flask" = LocalProxy(_find_app) # type: ignore -request: "Request" = LocalProxy(partial(_lookup_req_object, "request")) # type: ignore -session: "SessionMixin" = LocalProxy( # type: ignore - partial(_lookup_req_object, "session") -) -g: "_AppCtxGlobals" = LocalProxy(partial(_lookup_app_object, "g")) # type: ignore diff --git a/venv/lib/python3.8/site-packages/flask/helpers.py b/venv/lib/python3.8/site-packages/flask/helpers.py deleted file mode 100644 index 7b8b087..0000000 --- a/venv/lib/python3.8/site-packages/flask/helpers.py +++ /dev/null @@ -1,836 +0,0 @@ -import os -import pkgutil -import socket -import sys -import typing as t -import warnings -from datetime import datetime -from datetime import timedelta -from functools import lru_cache -from functools import update_wrapper -from threading import RLock - -import werkzeug.utils -from werkzeug.exceptions import NotFound -from werkzeug.routing import BuildError -from werkzeug.urls import url_quote - -from .globals import _app_ctx_stack -from .globals import _request_ctx_stack -from .globals import current_app -from .globals import request -from .globals import session -from .signals import message_flashed - -if t.TYPE_CHECKING: - from .wrappers import Response - - -def get_env() -> str: - """Get the environment the app is running in, indicated by the - :envvar:`FLASK_ENV` environment variable. The default is - ``'production'``. - """ - return os.environ.get("FLASK_ENV") or "production" - - -def get_debug_flag() -> bool: - """Get whether debug mode should be enabled for the app, indicated - by the :envvar:`FLASK_DEBUG` environment variable. The default is - ``True`` if :func:`.get_env` returns ``'development'``, or ``False`` - otherwise. - """ - val = os.environ.get("FLASK_DEBUG") - - if not val: - return get_env() == "development" - - return val.lower() not in ("0", "false", "no") - - -def get_load_dotenv(default: bool = True) -> bool: - """Get whether the user has disabled loading dotenv files by setting - :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load the - files. - - :param default: What to return if the env var isn't set. - """ - val = os.environ.get("FLASK_SKIP_DOTENV") - - if not val: - return default - - return val.lower() in ("0", "false", "no") - - -def stream_with_context( - generator_or_function: t.Union[ - t.Iterator[t.AnyStr], t.Callable[..., t.Iterator[t.AnyStr]] - ] -) -> t.Iterator[t.AnyStr]: - """Request contexts disappear when the response is started on the server. 
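A quick sketch of how ``FLASK_ENV`` drives the two helpers defined above::

    import os
    from flask.helpers import get_debug_flag, get_env

    os.environ["FLASK_ENV"] = "development"
    print(get_env())          # "development"
    print(get_debug_flag())   # True, because FLASK_DEBUG is not set explicitly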
- This is done for efficiency reasons and to make it less likely to encounter - memory leaks with badly written WSGI middlewares. The downside is that if - you are using streamed responses, the generator cannot access request bound - information any more. - - This function however can help you keep the context around for longer:: - - from flask import stream_with_context, request, Response - - @app.route('/stream') - def streamed_response(): - @stream_with_context - def generate(): - yield 'Hello ' - yield request.args['name'] - yield '!' - return Response(generate()) - - Alternatively it can also be used around a specific generator:: - - from flask import stream_with_context, request, Response - - @app.route('/stream') - def streamed_response(): - def generate(): - yield 'Hello ' - yield request.args['name'] - yield '!' - return Response(stream_with_context(generate())) - - .. versionadded:: 0.9 - """ - try: - gen = iter(generator_or_function) # type: ignore - except TypeError: - - def decorator(*args: t.Any, **kwargs: t.Any) -> t.Any: - gen = generator_or_function(*args, **kwargs) # type: ignore - return stream_with_context(gen) - - return update_wrapper(decorator, generator_or_function) # type: ignore - - def generator() -> t.Generator: - ctx = _request_ctx_stack.top - if ctx is None: - raise RuntimeError( - "Attempted to stream with context but " - "there was no context in the first place to keep around." - ) - with ctx: - # Dummy sentinel. Has to be inside the context block or we're - # not actually keeping the context around. - yield None - - # The try/finally is here so that if someone passes a WSGI level - # iterator in we're still running the cleanup logic. Generators - # don't need that because they are closed on their destruction - # automatically. - try: - yield from gen - finally: - if hasattr(gen, "close"): - gen.close() # type: ignore - - # The trick is to start the generator. Then the code execution runs until - # the first dummy None is yielded at which point the context was already - # pushed. This item is discarded. Then when the iteration continues the - # real generator is executed. - wrapped_g = generator() - next(wrapped_g) - return wrapped_g - - -def make_response(*args: t.Any) -> "Response": - """Sometimes it is necessary to set additional headers in a view. Because - views do not have to return response objects but can return a value that - is converted into a response object by Flask itself, it becomes tricky to - add headers to it. This function can be called instead of using a return - and you will get a response object which you can use to attach headers. - - If view looked like this and you want to add a new header:: - - def index(): - return render_template('index.html', foo=42) - - You can now do something like this:: - - def index(): - response = make_response(render_template('index.html', foo=42)) - response.headers['X-Parachutes'] = 'parachutes are cool' - return response - - This function accepts the very same arguments you can return from a - view function. 
This for example creates a response with a 404 error - code:: - - response = make_response(render_template('not_found.html'), 404) - - The other use case of this function is to force the return value of a - view function into a response which is helpful with view - decorators:: - - response = make_response(view_function()) - response.headers['X-Parachutes'] = 'parachutes are cool' - - Internally this function does the following things: - - - if no arguments are passed, it creates a new response argument - - if one argument is passed, :meth:`flask.Flask.make_response` - is invoked with it. - - if more than one argument is passed, the arguments are passed - to the :meth:`flask.Flask.make_response` function as tuple. - - .. versionadded:: 0.6 - """ - if not args: - return current_app.response_class() - if len(args) == 1: - args = args[0] - return current_app.make_response(args) - - -def url_for(endpoint: str, **values: t.Any) -> str: - """Generates a URL to the given endpoint with the method provided. - - Variable arguments that are unknown to the target endpoint are appended - to the generated URL as query arguments. If the value of a query argument - is ``None``, the whole pair is skipped. In case blueprints are active - you can shortcut references to the same blueprint by prefixing the - local endpoint with a dot (``.``). - - This will reference the index function local to the current blueprint:: - - url_for('.index') - - See :ref:`url-building`. - - Configuration values ``APPLICATION_ROOT`` and ``SERVER_NAME`` are only used when - generating URLs outside of a request context. - - To integrate applications, :class:`Flask` has a hook to intercept URL build - errors through :attr:`Flask.url_build_error_handlers`. The `url_for` - function results in a :exc:`~werkzeug.routing.BuildError` when the current - app does not have a URL for the given endpoint and values. When it does, the - :data:`~flask.current_app` calls its :attr:`~Flask.url_build_error_handlers` if - it is not ``None``, which can return a string to use as the result of - `url_for` (instead of `url_for`'s default to raise the - :exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception. - An example:: - - def external_url_handler(error, endpoint, values): - "Looks up an external URL when `url_for` cannot build a URL." - # This is an example of hooking the build_error_handler. - # Here, lookup_url is some utility function you've built - # which looks up the endpoint in some external URL registry. - url = lookup_url(endpoint, **values) - if url is None: - # External lookup did not have a URL. - # Re-raise the BuildError, in context of original traceback. - exc_type, exc_value, tb = sys.exc_info() - if exc_value is error: - raise exc_type(exc_value).with_traceback(tb) - else: - raise error - # url_for will use this result, instead of raising BuildError. - return url - - app.url_build_error_handlers.append(external_url_handler) - - Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and - `endpoint` and `values` are the arguments passed into `url_for`. Note - that this is for building URLs outside the current application, and not for - handling 404 NotFound errors. - - .. versionadded:: 0.10 - The `_scheme` parameter was added. - - .. versionadded:: 0.9 - The `_anchor` and `_method` parameters were added. - - .. versionadded:: 0.9 - Calls :meth:`Flask.handle_build_error` on - :exc:`~werkzeug.routing.BuildError`. 
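A minimal sketch of :func:`url_for` usage, including an external URL (the default server name under a test request context is ``localhost``)::

    from flask import Flask, url_for

    app = Flask(__name__)

    @app.route("/user/<name>")
    def profile(name):
        return name

    with app.test_request_context():
        print(url_for("profile", name="ada"))                   # /user/ada
        print(url_for("profile", name="ada", _external=True))   # http://localhost/user/ada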
- - :param endpoint: the endpoint of the URL (name of the function) - :param values: the variable arguments of the URL rule - :param _external: if set to ``True``, an absolute URL is generated. Server - address can be changed via ``SERVER_NAME`` configuration variable which - falls back to the `Host` header, then to the IP and port of the request. - :param _scheme: a string specifying the desired URL scheme. The `_external` - parameter must be set to ``True`` or a :exc:`ValueError` is raised. The default - behavior uses the same scheme as the current request, or - :data:`PREFERRED_URL_SCHEME` if no request context is available. - This also can be set to an empty string to build protocol-relative - URLs. - :param _anchor: if provided this is added as anchor to the URL. - :param _method: if provided this explicitly specifies an HTTP method. - """ - appctx = _app_ctx_stack.top - reqctx = _request_ctx_stack.top - - if appctx is None: - raise RuntimeError( - "Attempted to generate a URL without the application context being" - " pushed. This has to be executed when application context is" - " available." - ) - - # If request specific information is available we have some extra - # features that support "relative" URLs. - if reqctx is not None: - url_adapter = reqctx.url_adapter - blueprint_name = request.blueprint - - if endpoint[:1] == ".": - if blueprint_name is not None: - endpoint = f"{blueprint_name}{endpoint}" - else: - endpoint = endpoint[1:] - - external = values.pop("_external", False) - - # Otherwise go with the url adapter from the appctx and make - # the URLs external by default. - else: - url_adapter = appctx.url_adapter - - if url_adapter is None: - raise RuntimeError( - "Application was not able to create a URL adapter for request" - " independent URL generation. You might be able to fix this by" - " setting the SERVER_NAME config variable." - ) - - external = values.pop("_external", True) - - anchor = values.pop("_anchor", None) - method = values.pop("_method", None) - scheme = values.pop("_scheme", None) - appctx.app.inject_url_defaults(endpoint, values) - - # This is not the best way to deal with this but currently the - # underlying Werkzeug router does not support overriding the scheme on - # a per build call basis. - old_scheme = None - if scheme is not None: - if not external: - raise ValueError("When specifying _scheme, _external must be True") - old_scheme = url_adapter.url_scheme - url_adapter.url_scheme = scheme - - try: - try: - rv = url_adapter.build( - endpoint, values, method=method, force_external=external - ) - finally: - if old_scheme is not None: - url_adapter.url_scheme = old_scheme - except BuildError as error: - # We need to inject the values again so that the app callback can - # deal with that sort of stuff. - values["_external"] = external - values["_anchor"] = anchor - values["_method"] = method - values["_scheme"] = scheme - return appctx.app.handle_url_build_error(error, endpoint, values) - - if anchor is not None: - rv += f"#{url_quote(anchor)}" - return rv - - -def get_template_attribute(template_name: str, attribute: str) -> t.Any: - """Loads a macro (or variable) a template exports. This can be used to - invoke a macro from within Python code. If you for example have a - template named :file:`_cider.html` with the following contents: - - .. 
sourcecode:: html+jinja - - {% macro hello(name) %}Hello {{ name }}!{% endmacro %} - - You can access this from Python code like this:: - - hello = get_template_attribute('_cider.html', 'hello') - return hello('World') - - .. versionadded:: 0.2 - - :param template_name: the name of the template - :param attribute: the name of the variable of macro to access - """ - return getattr(current_app.jinja_env.get_template(template_name).module, attribute) - - -def flash(message: str, category: str = "message") -> None: - """Flashes a message to the next request. In order to remove the - flashed message from the session and to display it to the user, - the template has to call :func:`get_flashed_messages`. - - .. versionchanged:: 0.3 - `category` parameter added. - - :param message: the message to be flashed. - :param category: the category for the message. The following values - are recommended: ``'message'`` for any kind of message, - ``'error'`` for errors, ``'info'`` for information - messages and ``'warning'`` for warnings. However any - kind of string can be used as category. - """ - # Original implementation: - # - # session.setdefault('_flashes', []).append((category, message)) - # - # This assumed that changes made to mutable structures in the session are - # always in sync with the session object, which is not true for session - # implementations that use external storage for keeping their keys/values. - flashes = session.get("_flashes", []) - flashes.append((category, message)) - session["_flashes"] = flashes - message_flashed.send( - current_app._get_current_object(), # type: ignore - message=message, - category=category, - ) - - -def get_flashed_messages( - with_categories: bool = False, category_filter: t.Iterable[str] = () -) -> t.Union[t.List[str], t.List[t.Tuple[str, str]]]: - """Pulls all flashed messages from the session and returns them. - Further calls in the same request to the function will return - the same messages. By default just the messages are returned, - but when `with_categories` is set to ``True``, the return value will - be a list of tuples in the form ``(category, message)`` instead. - - Filter the flashed messages to one or more categories by providing those - categories in `category_filter`. This allows rendering categories in - separate html blocks. The `with_categories` and `category_filter` - arguments are distinct: - - * `with_categories` controls whether categories are returned with message - text (``True`` gives a tuple, where ``False`` gives just the message text). - * `category_filter` filters the messages down to only those matching the - provided categories. - - See :doc:`/patterns/flashing` for examples. - - .. versionchanged:: 0.3 - `with_categories` parameter added. - - .. versionchanged:: 0.9 - `category_filter` parameter added. - - :param with_categories: set to ``True`` to also receive categories. - :param category_filter: filter of categories to limit return values. Only - categories in the list will be returned. 
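A minimal sketch of flashing with categories and filtering, assuming a secret key is configured because the messages live in the session::

    from flask import Flask, flash, get_flashed_messages

    app = Flask(__name__)
    app.secret_key = "dev"

    with app.test_request_context("/"):
        flash("Saved.", "info")
        flash("Low disk space.", "warning")
        print(get_flashed_messages(with_categories=True))
        # [('info', 'Saved.'), ('warning', 'Low disk space.')]
        print(get_flashed_messages(category_filter=["warning"]))
        # ['Low disk space.']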
- """ - flashes = _request_ctx_stack.top.flashes - if flashes is None: - _request_ctx_stack.top.flashes = flashes = ( - session.pop("_flashes") if "_flashes" in session else [] - ) - if category_filter: - flashes = list(filter(lambda f: f[0] in category_filter, flashes)) - if not with_categories: - return [x[1] for x in flashes] - return flashes - - -def _prepare_send_file_kwargs( - download_name: t.Optional[str] = None, - attachment_filename: t.Optional[str] = None, - etag: t.Optional[t.Union[bool, str]] = None, - add_etags: t.Optional[t.Union[bool]] = None, - max_age: t.Optional[ - t.Union[int, t.Callable[[t.Optional[str]], t.Optional[int]]] - ] = None, - cache_timeout: t.Optional[int] = None, - **kwargs: t.Any, -) -> t.Dict[str, t.Any]: - if attachment_filename is not None: - warnings.warn( - "The 'attachment_filename' parameter has been renamed to" - " 'download_name'. The old name will be removed in Flask" - " 2.1.", - DeprecationWarning, - stacklevel=3, - ) - download_name = attachment_filename - - if cache_timeout is not None: - warnings.warn( - "The 'cache_timeout' parameter has been renamed to" - " 'max_age'. The old name will be removed in Flask 2.1.", - DeprecationWarning, - stacklevel=3, - ) - max_age = cache_timeout - - if add_etags is not None: - warnings.warn( - "The 'add_etags' parameter has been renamed to 'etag'. The" - " old name will be removed in Flask 2.1.", - DeprecationWarning, - stacklevel=3, - ) - etag = add_etags - - if max_age is None: - max_age = current_app.get_send_file_max_age - - kwargs.update( - environ=request.environ, - download_name=download_name, - etag=etag, - max_age=max_age, - use_x_sendfile=current_app.use_x_sendfile, - response_class=current_app.response_class, - _root_path=current_app.root_path, # type: ignore - ) - return kwargs - - -def send_file( - path_or_file: t.Union[os.PathLike, str, t.BinaryIO], - mimetype: t.Optional[str] = None, - as_attachment: bool = False, - download_name: t.Optional[str] = None, - attachment_filename: t.Optional[str] = None, - conditional: bool = True, - etag: t.Union[bool, str] = True, - add_etags: t.Optional[bool] = None, - last_modified: t.Optional[t.Union[datetime, int, float]] = None, - max_age: t.Optional[ - t.Union[int, t.Callable[[t.Optional[str]], t.Optional[int]]] - ] = None, - cache_timeout: t.Optional[int] = None, -): - """Send the contents of a file to the client. - - The first argument can be a file path or a file-like object. Paths - are preferred in most cases because Werkzeug can manage the file and - get extra information from the path. Passing a file-like object - requires that the file is opened in binary mode, and is mostly - useful when building a file in memory with :class:`io.BytesIO`. - - Never pass file paths provided by a user. The path is assumed to be - trusted, so a user could craft a path to access a file you didn't - intend. Use :func:`send_from_directory` to safely serve - user-requested paths from within a directory. - - If the WSGI server sets a ``file_wrapper`` in ``environ``, it is - used, otherwise Werkzeug's built-in wrapper is used. Alternatively, - if the HTTP server supports ``X-Sendfile``, configuring Flask with - ``USE_X_SENDFILE = True`` will tell the server to send the given - path, which is much more efficient than reading it in Python. - - :param path_or_file: The path to the file to send, relative to the - current working directory if a relative path is given. - Alternatively, a file-like object opened in binary mode. 
Make - sure the file pointer is seeked to the start of the data. - :param mimetype: The MIME type to send for the file. If not - provided, it will try to detect it from the file name. - :param as_attachment: Indicate to a browser that it should offer to - save the file instead of displaying it. - :param download_name: The default name browsers will use when saving - the file. Defaults to the passed file name. - :param conditional: Enable conditional and range responses based on - request headers. Requires passing a file path and ``environ``. - :param etag: Calculate an ETag for the file, which requires passing - a file path. Can also be a string to use instead. - :param last_modified: The last modified time to send for the file, - in seconds. If not provided, it will try to detect it from the - file path. - :param max_age: How long the client should cache the file, in - seconds. If set, ``Cache-Control`` will be ``public``, otherwise - it will be ``no-cache`` to prefer conditional caching. - - .. versionchanged:: 2.0 - ``download_name`` replaces the ``attachment_filename`` - parameter. If ``as_attachment=False``, it is passed with - ``Content-Disposition: inline`` instead. - - .. versionchanged:: 2.0 - ``max_age`` replaces the ``cache_timeout`` parameter. - ``conditional`` is enabled and ``max_age`` is not set by - default. - - .. versionchanged:: 2.0 - ``etag`` replaces the ``add_etags`` parameter. It can be a - string to use instead of generating one. - - .. versionchanged:: 2.0 - Passing a file-like object that inherits from - :class:`~io.TextIOBase` will raise a :exc:`ValueError` rather - than sending an empty file. - - .. versionadded:: 2.0 - Moved the implementation to Werkzeug. This is now a wrapper to - pass some Flask-specific arguments. - - .. versionchanged:: 1.1 - ``filename`` may be a :class:`~os.PathLike` object. - - .. versionchanged:: 1.1 - Passing a :class:`~io.BytesIO` object supports range requests. - - .. versionchanged:: 1.0.3 - Filenames are encoded with ASCII instead of Latin-1 for broader - compatibility with WSGI servers. - - .. versionchanged:: 1.0 - UTF-8 filenames as specified in :rfc:`2231` are supported. - - .. versionchanged:: 0.12 - The filename is no longer automatically inferred from file - objects. If you want to use automatic MIME and etag support, - pass a filename via ``filename_or_fp`` or - ``attachment_filename``. - - .. versionchanged:: 0.12 - ``attachment_filename`` is preferred over ``filename`` for MIME - detection. - - .. versionchanged:: 0.9 - ``cache_timeout`` defaults to - :meth:`Flask.get_send_file_max_age`. - - .. versionchanged:: 0.7 - MIME guessing and etag support for file-like objects was - deprecated because it was unreliable. Pass a filename if you are - able to, otherwise attach an etag yourself. - - .. versionchanged:: 0.5 - The ``add_etags``, ``cache_timeout`` and ``conditional`` - parameters were added. The default behavior is to add etags. - - .. 
versionadded:: 0.2 - """ - return werkzeug.utils.send_file( - **_prepare_send_file_kwargs( - path_or_file=path_or_file, - environ=request.environ, - mimetype=mimetype, - as_attachment=as_attachment, - download_name=download_name, - attachment_filename=attachment_filename, - conditional=conditional, - etag=etag, - add_etags=add_etags, - last_modified=last_modified, - max_age=max_age, - cache_timeout=cache_timeout, - ) - ) - - -def safe_join(directory: str, *pathnames: str) -> str: - """Safely join zero or more untrusted path components to a base - directory to avoid escaping the base directory. - - :param directory: The trusted base directory. - :param pathnames: The untrusted path components relative to the - base directory. - :return: A safe path, otherwise ``None``. - """ - warnings.warn( - "'flask.helpers.safe_join' is deprecated and will be removed in" - " Flask 2.1. Use 'werkzeug.utils.safe_join' instead.", - DeprecationWarning, - stacklevel=2, - ) - path = werkzeug.utils.safe_join(directory, *pathnames) - - if path is None: - raise NotFound() - - return path - - -def send_from_directory( - directory: t.Union[os.PathLike, str], - path: t.Union[os.PathLike, str], - filename: t.Optional[str] = None, - **kwargs: t.Any, -) -> "Response": - """Send a file from within a directory using :func:`send_file`. - - .. code-block:: python - - @app.route("/uploads/") - def download_file(name): - return send_from_directory( - app.config['UPLOAD_FOLDER'], name, as_attachment=True - ) - - This is a secure way to serve files from a folder, such as static - files or uploads. Uses :func:`~werkzeug.security.safe_join` to - ensure the path coming from the client is not maliciously crafted to - point outside the specified directory. - - If the final path does not point to an existing regular file, - raises a 404 :exc:`~werkzeug.exceptions.NotFound` error. - - :param directory: The directory that ``path`` must be located under. - :param path: The path to the file to send, relative to - ``directory``. - :param kwargs: Arguments to pass to :func:`send_file`. - - .. versionchanged:: 2.0 - ``path`` replaces the ``filename`` parameter. - - .. versionadded:: 2.0 - Moved the implementation to Werkzeug. This is now a wrapper to - pass some Flask-specific arguments. - - .. versionadded:: 0.5 - """ - if filename is not None: - warnings.warn( - "The 'filename' parameter has been renamed to 'path'. The" - " old name will be removed in Flask 2.1.", - DeprecationWarning, - stacklevel=2, - ) - path = filename - - return werkzeug.utils.send_from_directory( # type: ignore - directory, path, **_prepare_send_file_kwargs(**kwargs) - ) - - -def get_root_path(import_name: str) -> str: - """Find the root path of a package, or the path that contains a - module. If it cannot be found, returns the current working - directory. - - Not to be confused with the value returned by :func:`find_package`. - - :meta private: - """ - # Module already imported and has a file attribute. Use that first. - mod = sys.modules.get(import_name) - - if mod is not None and hasattr(mod, "__file__"): - return os.path.dirname(os.path.abspath(mod.__file__)) - - # Next attempt: check the loader. - loader = pkgutil.get_loader(import_name) - - # Loader does not exist or we're referring to an unloaded main - # module or a main module without path (interactive sessions), go - # with the current working directory. 
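The root-path lookup documented just above boils down to a three-step fallback: an already-imported module's __file__, then whatever the import machinery reports, then the current working directory. A minimal stand-alone sketch of that idea (not Flask's implementation; the helper name guess_root_path is made up here):

import importlib.util
import os
import sys


def guess_root_path(import_name):
    # Prefer the __file__ of a module that is already imported.
    mod = sys.modules.get(import_name)
    if mod is not None and getattr(mod, "__file__", None):
        return os.path.dirname(os.path.abspath(mod.__file__))

    # Otherwise ask the import system; origin is None for namespace
    # packages and "built-in"/"frozen" for modules without a file.
    try:
        spec = importlib.util.find_spec(import_name)
    except (ImportError, ValueError):
        spec = None
    if spec is not None and spec.origin and os.path.exists(spec.origin):
        return os.path.dirname(os.path.abspath(spec.origin))

    # Last resort, as described above: the current working directory.
    return os.getcwd()


print(guess_root_path("json"))  # e.g. the stdlib json package directory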
- if loader is None or import_name == "__main__": - return os.getcwd() - - if hasattr(loader, "get_filename"): - filepath = loader.get_filename(import_name) # type: ignore - else: - # Fall back to imports. - __import__(import_name) - mod = sys.modules[import_name] - filepath = getattr(mod, "__file__", None) - - # If we don't have a file path it might be because it is a - # namespace package. In this case pick the root path from the - # first module that is contained in the package. - if filepath is None: - raise RuntimeError( - "No root path can be found for the provided module" - f" {import_name!r}. This can happen because the module" - " came from an import hook that does not provide file" - " name information or because it's a namespace package." - " In this case the root path needs to be explicitly" - " provided." - ) - - # filepath is import_name.py for a module, or __init__.py for a package. - return os.path.dirname(os.path.abspath(filepath)) - - -class locked_cached_property(werkzeug.utils.cached_property): - """A :func:`property` that is only evaluated once. Like - :class:`werkzeug.utils.cached_property` except access uses a lock - for thread safety. - - .. versionchanged:: 2.0 - Inherits from Werkzeug's ``cached_property`` (and ``property``). - """ - - def __init__( - self, - fget: t.Callable[[t.Any], t.Any], - name: t.Optional[str] = None, - doc: t.Optional[str] = None, - ) -> None: - super().__init__(fget, name=name, doc=doc) - self.lock = RLock() - - def __get__(self, obj: object, type: type = None) -> t.Any: # type: ignore - if obj is None: - return self - - with self.lock: - return super().__get__(obj, type=type) - - def __set__(self, obj: object, value: t.Any) -> None: - with self.lock: - super().__set__(obj, value) - - def __delete__(self, obj: object) -> None: - with self.lock: - super().__delete__(obj) - - -def total_seconds(td: timedelta) -> int: - """Returns the total seconds from a timedelta object. - - :param timedelta td: the timedelta to be converted in seconds - - :returns: number of seconds - :rtype: int - - .. deprecated:: 2.0 - Will be removed in Flask 2.1. Use - :meth:`timedelta.total_seconds` instead. - """ - warnings.warn( - "'total_seconds' is deprecated and will be removed in Flask" - " 2.1. Use 'timedelta.total_seconds' instead.", - DeprecationWarning, - stacklevel=2, - ) - return td.days * 60 * 60 * 24 + td.seconds - - -def is_ip(value: str) -> bool: - """Determine if the given string is an IP address. - - :param value: value to check - :type value: str - - :return: True if string is an IP address - :rtype: bool - """ - for family in (socket.AF_INET, socket.AF_INET6): - try: - socket.inet_pton(family, value) - except OSError: - pass - else: - return True - - return False - - -@lru_cache(maxsize=None) -def _split_blueprint_path(name: str) -> t.List[str]: - out: t.List[str] = [name] - - if "." 
in name: - out.extend(_split_blueprint_path(name.rpartition(".")[0])) - - return out diff --git a/venv/lib/python3.8/site-packages/flask/json/__init__.py b/venv/lib/python3.8/site-packages/flask/json/__init__.py deleted file mode 100644 index 10d5123..0000000 --- a/venv/lib/python3.8/site-packages/flask/json/__init__.py +++ /dev/null @@ -1,357 +0,0 @@ -import decimal -import io -import json as _json -import typing as t -import uuid -import warnings -from datetime import date - -from jinja2.utils import htmlsafe_json_dumps as _jinja_htmlsafe_dumps -from werkzeug.http import http_date - -from ..globals import current_app -from ..globals import request - -if t.TYPE_CHECKING: - from ..app import Flask - from ..wrappers import Response - -try: - import dataclasses -except ImportError: - # Python < 3.7 - dataclasses = None # type: ignore - - -class JSONEncoder(_json.JSONEncoder): - """The default JSON encoder. Handles extra types compared to the - built-in :class:`json.JSONEncoder`. - - - :class:`datetime.datetime` and :class:`datetime.date` are - serialized to :rfc:`822` strings. This is the same as the HTTP - date format. - - :class:`uuid.UUID` is serialized to a string. - - :class:`dataclasses.dataclass` is passed to - :func:`dataclasses.asdict`. - - :class:`~markupsafe.Markup` (or any object with a ``__html__`` - method) will call the ``__html__`` method to get a string. - - Assign a subclass of this to :attr:`flask.Flask.json_encoder` or - :attr:`flask.Blueprint.json_encoder` to override the default. - """ - - def default(self, o: t.Any) -> t.Any: - """Convert ``o`` to a JSON serializable type. See - :meth:`json.JSONEncoder.default`. Python does not support - overriding how basic types like ``str`` or ``list`` are - serialized, they are handled before this method. - """ - if isinstance(o, date): - return http_date(o) - if isinstance(o, (decimal.Decimal, uuid.UUID)): - return str(o) - if dataclasses and dataclasses.is_dataclass(o): - return dataclasses.asdict(o) - if hasattr(o, "__html__"): - return str(o.__html__()) - return super().default(o) - - -class JSONDecoder(_json.JSONDecoder): - """The default JSON decoder. - - This does not change any behavior from the built-in - :class:`json.JSONDecoder`. - - Assign a subclass of this to :attr:`flask.Flask.json_decoder` or - :attr:`flask.Blueprint.json_decoder` to override the default. 
- """ - - -def _dump_arg_defaults( - kwargs: t.Dict[str, t.Any], app: t.Optional["Flask"] = None -) -> None: - """Inject default arguments for dump functions.""" - if app is None: - app = current_app - - if app: - cls = app.json_encoder - bp = app.blueprints.get(request.blueprint) if request else None # type: ignore - if bp is not None and bp.json_encoder is not None: - cls = bp.json_encoder - - kwargs.setdefault("cls", cls) - kwargs.setdefault("ensure_ascii", app.config["JSON_AS_ASCII"]) - kwargs.setdefault("sort_keys", app.config["JSON_SORT_KEYS"]) - else: - kwargs.setdefault("sort_keys", True) - kwargs.setdefault("cls", JSONEncoder) - - -def _load_arg_defaults( - kwargs: t.Dict[str, t.Any], app: t.Optional["Flask"] = None -) -> None: - """Inject default arguments for load functions.""" - if app is None: - app = current_app - - if app: - cls = app.json_decoder - bp = app.blueprints.get(request.blueprint) if request else None # type: ignore - if bp is not None and bp.json_decoder is not None: - cls = bp.json_decoder - - kwargs.setdefault("cls", cls) - else: - kwargs.setdefault("cls", JSONDecoder) - - -def dumps(obj: t.Any, app: t.Optional["Flask"] = None, **kwargs: t.Any) -> str: - """Serialize an object to a string of JSON. - - Takes the same arguments as the built-in :func:`json.dumps`, with - some defaults from application configuration. - - :param obj: Object to serialize to JSON. - :param app: Use this app's config instead of the active app context - or defaults. - :param kwargs: Extra arguments passed to :func:`json.dumps`. - - .. versionchanged:: 2.0.2 - :class:`decimal.Decimal` is supported by converting to a string. - - .. versionchanged:: 2.0 - ``encoding`` is deprecated and will be removed in Flask 2.1. - - .. versionchanged:: 1.0.3 - ``app`` can be passed directly, rather than requiring an app - context for configuration. - """ - _dump_arg_defaults(kwargs, app=app) - encoding = kwargs.pop("encoding", None) - rv = _json.dumps(obj, **kwargs) - - if encoding is not None: - warnings.warn( - "'encoding' is deprecated and will be removed in Flask 2.1.", - DeprecationWarning, - stacklevel=2, - ) - - if isinstance(rv, str): - return rv.encode(encoding) # type: ignore - - return rv - - -def dump( - obj: t.Any, fp: t.IO[str], app: t.Optional["Flask"] = None, **kwargs: t.Any -) -> None: - """Serialize an object to JSON written to a file object. - - Takes the same arguments as the built-in :func:`json.dump`, with - some defaults from application configuration. - - :param obj: Object to serialize to JSON. - :param fp: File object to write JSON to. - :param app: Use this app's config instead of the active app context - or defaults. - :param kwargs: Extra arguments passed to :func:`json.dump`. - - .. versionchanged:: 2.0 - Writing to a binary file, and the ``encoding`` argument, is - deprecated and will be removed in Flask 2.1. - """ - _dump_arg_defaults(kwargs, app=app) - encoding = kwargs.pop("encoding", None) - show_warning = encoding is not None - - try: - fp.write("") - except TypeError: - show_warning = True - fp = io.TextIOWrapper(fp, encoding or "utf-8") # type: ignore - - if show_warning: - warnings.warn( - "Writing to a binary file, and the 'encoding' argument, is" - " deprecated and will be removed in Flask 2.1.", - DeprecationWarning, - stacklevel=2, - ) - - _json.dump(obj, fp, **kwargs) - - -def loads(s: str, app: t.Optional["Flask"] = None, **kwargs: t.Any) -> t.Any: - """Deserialize an object from a string of JSON. 
- - Takes the same arguments as the built-in :func:`json.loads`, with - some defaults from application configuration. - - :param s: JSON string to deserialize. - :param app: Use this app's config instead of the active app context - or defaults. - :param kwargs: Extra arguments passed to :func:`json.loads`. - - .. versionchanged:: 2.0 - ``encoding`` is deprecated and will be removed in Flask 2.1. The - data must be a string or UTF-8 bytes. - - .. versionchanged:: 1.0.3 - ``app`` can be passed directly, rather than requiring an app - context for configuration. - """ - _load_arg_defaults(kwargs, app=app) - encoding = kwargs.pop("encoding", None) - - if encoding is not None: - warnings.warn( - "'encoding' is deprecated and will be removed in Flask 2.1." - " The data must be a string or UTF-8 bytes.", - DeprecationWarning, - stacklevel=2, - ) - - if isinstance(s, bytes): - s = s.decode(encoding) - - return _json.loads(s, **kwargs) - - -def load(fp: t.IO[str], app: t.Optional["Flask"] = None, **kwargs: t.Any) -> t.Any: - """Deserialize an object from JSON read from a file object. - - Takes the same arguments as the built-in :func:`json.load`, with - some defaults from application configuration. - - :param fp: File object to read JSON from. - :param app: Use this app's config instead of the active app context - or defaults. - :param kwargs: Extra arguments passed to :func:`json.load`. - - .. versionchanged:: 2.0 - ``encoding`` is deprecated and will be removed in Flask 2.1. The - file must be text mode, or binary mode with UTF-8 bytes. - """ - _load_arg_defaults(kwargs, app=app) - encoding = kwargs.pop("encoding", None) - - if encoding is not None: - warnings.warn( - "'encoding' is deprecated and will be removed in Flask 2.1." - " The file must be text mode, or binary mode with UTF-8" - " bytes.", - DeprecationWarning, - stacklevel=2, - ) - - if isinstance(fp.read(0), bytes): - fp = io.TextIOWrapper(fp, encoding) # type: ignore - - return _json.load(fp, **kwargs) - - -def htmlsafe_dumps(obj: t.Any, **kwargs: t.Any) -> str: - """Serialize an object to a string of JSON with :func:`dumps`, then - replace HTML-unsafe characters with Unicode escapes and mark the - result safe with :class:`~markupsafe.Markup`. - - This is available in templates as the ``|tojson`` filter. - - The returned string is safe to render in HTML documents and - ``') - # => <script> do_nasty_stuff() </script> - # sanitize_html('Click here for $100') - # => Click here for $100 - def sanitize_token(self, token): - - # accommodate filters which use token_type differently - token_type = token["type"] - if token_type in ("StartTag", "EndTag", "EmptyTag"): - name = token["name"] - namespace = token["namespace"] - if ((namespace, name) in self.allowed_elements or - (namespace is None and - (namespaces["html"], name) in self.allowed_elements)): - return self.allowed_token(token) - else: - return self.disallowed_token(token) - elif token_type == "Comment": - pass - else: - return token - - def allowed_token(self, token): - if "data" in token: - attrs = token["data"] - attr_names = set(attrs.keys()) - - # Remove forbidden attributes - for to_remove in (attr_names - self.allowed_attributes): - del token["data"][to_remove] - attr_names.remove(to_remove) - - # Remove attributes with disallowed URL values - for attr in (attr_names & self.attr_val_is_uri): - assert attr in attrs - # I don't have a clue where this regexp comes from or why it matches those - # characters, nor why we call unescape. I just know it's always been here. 
- # Should you be worried by this comment in a sanitizer? Yes. On the other hand, all - # this will do is remove *more* than it otherwise would. - val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\\s]+", '', - unescape(attrs[attr])).lower() - # remove replacement characters from unescaped characters - val_unescaped = val_unescaped.replace("\ufffd", "") - try: - uri = urlparse.urlparse(val_unescaped) - except ValueError: - uri = None - del attrs[attr] - if uri and uri.scheme: - if uri.scheme not in self.allowed_protocols: - del attrs[attr] - if uri.scheme == 'data': - m = data_content_type.match(uri.path) - if not m: - del attrs[attr] - elif m.group('content_type') not in self.allowed_content_types: - del attrs[attr] - - for attr in self.svg_attr_val_allows_ref: - if attr in attrs: - attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)', - ' ', - unescape(attrs[attr])) - if (token["name"] in self.svg_allow_local_href and - (namespaces['xlink'], 'href') in attrs and re.search(r'^\s*[^#\s].*', - attrs[(namespaces['xlink'], 'href')])): - del attrs[(namespaces['xlink'], 'href')] - if (None, 'style') in attrs: - attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')]) - token["data"] = attrs - return token - - def disallowed_token(self, token): - token_type = token["type"] - if token_type == "EndTag": - token["data"] = "" % token["name"] - elif token["data"]: - assert token_type in ("StartTag", "EmptyTag") - attrs = [] - for (ns, name), v in token["data"].items(): - attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v))) - token["data"] = "<%s%s>" % (token["name"], ''.join(attrs)) - else: - token["data"] = "<%s>" % token["name"] - if token.get("selfClosing"): - token["data"] = token["data"][:-1] + "/>" - - token["type"] = "Characters" - - del token["name"] - return token - - def sanitize_css(self, style): - # disallow urls - style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style) - - # gauntlet - if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style): - return '' - if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style): - return '' - - clean = [] - for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style): - if not value: - continue - if prop.lower() in self.allowed_css_properties: - clean.append(prop + ': ' + value + ';') - elif prop.split('-')[0].lower() in ['background', 'border', 'margin', - 'padding']: - for keyword in value.split(): - if keyword not in self.allowed_css_keywords and \ - not re.match(r"^(#[0-9a-fA-F]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqa - break - else: - clean.append(prop + ': ' + value + ';') - elif prop.lower() in self.allowed_svg_properties: - clean.append(prop + ': ' + value + ';') - - return ' '.join(clean) diff --git a/venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/whitespace.py b/venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/whitespace.py deleted file mode 100644 index 0d12584..0000000 --- a/venv/lib/python3.8/site-packages/pip/_vendor/html5lib/filters/whitespace.py +++ /dev/null @@ -1,38 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -import re - -from . 
import base -from ..constants import rcdataElements, spaceCharacters -spaceCharacters = "".join(spaceCharacters) - -SPACES_REGEX = re.compile("[%s]+" % spaceCharacters) - - -class Filter(base.Filter): - """Collapses whitespace except in pre, textarea, and script elements""" - spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements)) - - def __iter__(self): - preserve = 0 - for token in base.Filter.__iter__(self): - type = token["type"] - if type == "StartTag" \ - and (preserve or token["name"] in self.spacePreserveElements): - preserve += 1 - - elif type == "EndTag" and preserve: - preserve -= 1 - - elif not preserve and type == "SpaceCharacters" and token["data"]: - # Test on token["data"] above to not introduce spaces where there were not - token["data"] = " " - - elif not preserve and type == "Characters": - token["data"] = collapse_spaces(token["data"]) - - yield token - - -def collapse_spaces(text): - return SPACES_REGEX.sub(' ', text) diff --git a/venv/lib/python3.8/site-packages/pip/_vendor/html5lib/html5parser.py b/venv/lib/python3.8/site-packages/pip/_vendor/html5lib/html5parser.py deleted file mode 100644 index ae41a13..0000000 --- a/venv/lib/python3.8/site-packages/pip/_vendor/html5lib/html5parser.py +++ /dev/null @@ -1,2791 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals -from pip._vendor.six import with_metaclass, viewkeys - -import types -from collections import OrderedDict - -from . import _inputstream -from . import _tokenizer - -from . import treebuilders -from .treebuilders.base import Marker - -from . import _utils -from .constants import ( - spaceCharacters, asciiUpper2Lower, - specialElements, headingElements, cdataElements, rcdataElements, - tokenTypes, tagTokenTypes, - namespaces, - htmlIntegrationPointElements, mathmlTextIntegrationPointElements, - adjustForeignAttributes as adjustForeignAttributesMap, - adjustMathMLAttributes, adjustSVGAttributes, - E, - _ReparseException -) - - -def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs): - """Parse an HTML document as a string or file-like object into a tree - - :arg doc: the document to parse as a string or file-like object - - :arg treebuilder: the treebuilder to use when parsing - - :arg namespaceHTMLElements: whether or not to namespace HTML elements - - :returns: parsed tree - - Example: - - >>> from html5lib.html5parser import parse - >>> parse('
    <html><body><p>This is a doc</p></body></html>
    ') - - - """ - tb = treebuilders.getTreeBuilder(treebuilder) - p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) - return p.parse(doc, **kwargs) - - -def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs): - """Parse an HTML fragment as a string or file-like object into a tree - - :arg doc: the fragment to parse as a string or file-like object - - :arg container: the container context to parse the fragment in - - :arg treebuilder: the treebuilder to use when parsing - - :arg namespaceHTMLElements: whether or not to namespace HTML elements - - :returns: parsed tree - - Example: - - >>> from html5lib.html5libparser import parseFragment - >>> parseFragment('this is a fragment') - - - """ - tb = treebuilders.getTreeBuilder(treebuilder) - p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) - return p.parseFragment(doc, container=container, **kwargs) - - -def method_decorator_metaclass(function): - class Decorated(type): - def __new__(meta, classname, bases, classDict): - for attributeName, attribute in classDict.items(): - if isinstance(attribute, types.FunctionType): - attribute = function(attribute) - - classDict[attributeName] = attribute - return type.__new__(meta, classname, bases, classDict) - return Decorated - - -class HTMLParser(object): - """HTML parser - - Generates a tree structure from a stream of (possibly malformed) HTML. - - """ - - def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False): - """ - :arg tree: a treebuilder class controlling the type of tree that will be - returned. Built in treebuilders can be accessed through - html5lib.treebuilders.getTreeBuilder(treeType) - - :arg strict: raise an exception when a parse error is encountered - - :arg namespaceHTMLElements: whether or not to namespace HTML elements - - :arg debug: whether or not to enable debug mode which logs things - - Example: - - >>> from html5lib.html5parser import HTMLParser - >>> parser = HTMLParser() # generates parser with etree builder - >>> parser = HTMLParser('lxml', strict=True) # generates parser with lxml builder which is strict - - """ - - # Raise an exception on the first error encountered - self.strict = strict - - if tree is None: - tree = treebuilders.getTreeBuilder("etree") - self.tree = tree(namespaceHTMLElements) - self.errors = [] - - self.phases = dict([(name, cls(self, self.tree)) for name, cls in - getPhases(debug).items()]) - - def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs): - - self.innerHTMLMode = innerHTML - self.container = container - self.scripting = scripting - self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs) - self.reset() - - try: - self.mainLoop() - except _ReparseException: - self.reset() - self.mainLoop() - - def reset(self): - self.tree.reset() - self.firstStartTag = False - self.errors = [] - self.log = [] # only used with debug mode - # "quirks" / "limited quirks" / "no quirks" - self.compatMode = "no quirks" - - if self.innerHTMLMode: - self.innerHTML = self.container.lower() - - if self.innerHTML in cdataElements: - self.tokenizer.state = self.tokenizer.rcdataState - elif self.innerHTML in rcdataElements: - self.tokenizer.state = self.tokenizer.rawtextState - elif self.innerHTML == 'plaintext': - self.tokenizer.state = self.tokenizer.plaintextState - else: - # state already is data state - # self.tokenizer.state = self.tokenizer.dataState - pass - self.phase = self.phases["beforeHtml"] - 
self.phase.insertHtmlElement() - self.resetInsertionMode() - else: - self.innerHTML = False # pylint:disable=redefined-variable-type - self.phase = self.phases["initial"] - - self.lastPhase = None - - self.beforeRCDataPhase = None - - self.framesetOK = True - - @property - def documentEncoding(self): - """Name of the character encoding that was used to decode the input stream, or - :obj:`None` if that is not determined yet - - """ - if not hasattr(self, 'tokenizer'): - return None - return self.tokenizer.stream.charEncoding[0].name - - def isHTMLIntegrationPoint(self, element): - if (element.name == "annotation-xml" and - element.namespace == namespaces["mathml"]): - return ("encoding" in element.attributes and - element.attributes["encoding"].translate( - asciiUpper2Lower) in - ("text/html", "application/xhtml+xml")) - else: - return (element.namespace, element.name) in htmlIntegrationPointElements - - def isMathMLTextIntegrationPoint(self, element): - return (element.namespace, element.name) in mathmlTextIntegrationPointElements - - def mainLoop(self): - CharactersToken = tokenTypes["Characters"] - SpaceCharactersToken = tokenTypes["SpaceCharacters"] - StartTagToken = tokenTypes["StartTag"] - EndTagToken = tokenTypes["EndTag"] - CommentToken = tokenTypes["Comment"] - DoctypeToken = tokenTypes["Doctype"] - ParseErrorToken = tokenTypes["ParseError"] - - for token in self.normalizedTokens(): - prev_token = None - new_token = token - while new_token is not None: - prev_token = new_token - currentNode = self.tree.openElements[-1] if self.tree.openElements else None - currentNodeNamespace = currentNode.namespace if currentNode else None - currentNodeName = currentNode.name if currentNode else None - - type = new_token["type"] - - if type == ParseErrorToken: - self.parseError(new_token["data"], new_token.get("datavars", {})) - new_token = None - else: - if (len(self.tree.openElements) == 0 or - currentNodeNamespace == self.tree.defaultNamespace or - (self.isMathMLTextIntegrationPoint(currentNode) and - ((type == StartTagToken and - token["name"] not in frozenset(["mglyph", "malignmark"])) or - type in (CharactersToken, SpaceCharactersToken))) or - (currentNodeNamespace == namespaces["mathml"] and - currentNodeName == "annotation-xml" and - type == StartTagToken and - token["name"] == "svg") or - (self.isHTMLIntegrationPoint(currentNode) and - type in (StartTagToken, CharactersToken, SpaceCharactersToken))): - phase = self.phase - else: - phase = self.phases["inForeignContent"] - - if type == CharactersToken: - new_token = phase.processCharacters(new_token) - elif type == SpaceCharactersToken: - new_token = phase.processSpaceCharacters(new_token) - elif type == StartTagToken: - new_token = phase.processStartTag(new_token) - elif type == EndTagToken: - new_token = phase.processEndTag(new_token) - elif type == CommentToken: - new_token = phase.processComment(new_token) - elif type == DoctypeToken: - new_token = phase.processDoctype(new_token) - - if (type == StartTagToken and prev_token["selfClosing"] and - not prev_token["selfClosingAcknowledged"]): - self.parseError("non-void-element-with-trailing-solidus", - {"name": prev_token["name"]}) - - # When the loop finishes it's EOF - reprocess = True - phases = [] - while reprocess: - phases.append(self.phase) - reprocess = self.phase.processEOF() - if reprocess: - assert self.phase not in phases - - def normalizedTokens(self): - for token in self.tokenizer: - yield self.normalizeToken(token) - - def parse(self, stream, *args, **kwargs): - """Parse a 
HTML document into a well-formed tree - - :arg stream: a file-like object or string containing the HTML to be parsed - - The optional encoding parameter must be a string that indicates - the encoding. If specified, that encoding will be used, - regardless of any BOM or later declaration (such as in a meta - element). - - :arg scripting: treat noscript elements as if JavaScript was turned on - - :returns: parsed tree - - Example: - - >>> from html5lib.html5parser import HTMLParser - >>> parser = HTMLParser() - >>> parser.parse('
    <html><body><p>This is a doc</p></body></html>
    ') - - - """ - self._parse(stream, False, None, *args, **kwargs) - return self.tree.getDocument() - - def parseFragment(self, stream, *args, **kwargs): - """Parse a HTML fragment into a well-formed tree fragment - - :arg container: name of the element we're setting the innerHTML - property if set to None, default to 'div' - - :arg stream: a file-like object or string containing the HTML to be parsed - - The optional encoding parameter must be a string that indicates - the encoding. If specified, that encoding will be used, - regardless of any BOM or later declaration (such as in a meta - element) - - :arg scripting: treat noscript elements as if JavaScript was turned on - - :returns: parsed tree - - Example: - - >>> from html5lib.html5libparser import HTMLParser - >>> parser = HTMLParser() - >>> parser.parseFragment('this is a fragment') - - - """ - self._parse(stream, True, *args, **kwargs) - return self.tree.getFragment() - - def parseError(self, errorcode="XXX-undefined-error", datavars=None): - # XXX The idea is to make errorcode mandatory. - if datavars is None: - datavars = {} - self.errors.append((self.tokenizer.stream.position(), errorcode, datavars)) - if self.strict: - raise ParseError(E[errorcode] % datavars) - - def normalizeToken(self, token): - # HTML5 specific normalizations to the token stream - if token["type"] == tokenTypes["StartTag"]: - raw = token["data"] - token["data"] = OrderedDict(raw) - if len(raw) > len(token["data"]): - # we had some duplicated attribute, fix so first wins - token["data"].update(raw[::-1]) - - return token - - def adjustMathMLAttributes(self, token): - adjust_attributes(token, adjustMathMLAttributes) - - def adjustSVGAttributes(self, token): - adjust_attributes(token, adjustSVGAttributes) - - def adjustForeignAttributes(self, token): - adjust_attributes(token, adjustForeignAttributesMap) - - def reparseTokenNormal(self, token): - # pylint:disable=unused-argument - self.parser.phase() - - def resetInsertionMode(self): - # The name of this method is mostly historical. (It's also used in the - # specification.) 
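A short usage sketch of the parse() and parseFragment() entry points documented above; the markup strings are arbitrary and the comments describe what I would expect rather than verified output:

import html5lib

# Whole document: builds an xml.etree tree by default and fills in the
# implied html/head/body structure.
document = html5lib.parse("<p>This is a doc</p>")
print(document.tag)  # expected: "{http://www.w3.org/1999/xhtml}html"

# Fragment: parsed in the context of a container element ("div" by default).
fragment = html5lib.parseFragment("this is a <b>fragment</b>", container="div")
print(len(fragment))  # number of element children in the fragment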
- last = False - newModes = { - "select": "inSelect", - "td": "inCell", - "th": "inCell", - "tr": "inRow", - "tbody": "inTableBody", - "thead": "inTableBody", - "tfoot": "inTableBody", - "caption": "inCaption", - "colgroup": "inColumnGroup", - "table": "inTable", - "head": "inBody", - "body": "inBody", - "frameset": "inFrameset", - "html": "beforeHead" - } - for node in self.tree.openElements[::-1]: - nodeName = node.name - new_phase = None - if node == self.tree.openElements[0]: - assert self.innerHTML - last = True - nodeName = self.innerHTML - # Check for conditions that should only happen in the innerHTML - # case - if nodeName in ("select", "colgroup", "head", "html"): - assert self.innerHTML - - if not last and node.namespace != self.tree.defaultNamespace: - continue - - if nodeName in newModes: - new_phase = self.phases[newModes[nodeName]] - break - elif last: - new_phase = self.phases["inBody"] - break - - self.phase = new_phase - - def parseRCDataRawtext(self, token, contentType): - # Generic RCDATA/RAWTEXT Parsing algorithm - assert contentType in ("RAWTEXT", "RCDATA") - - self.tree.insertElement(token) - - if contentType == "RAWTEXT": - self.tokenizer.state = self.tokenizer.rawtextState - else: - self.tokenizer.state = self.tokenizer.rcdataState - - self.originalPhase = self.phase - - self.phase = self.phases["text"] - - -@_utils.memoize -def getPhases(debug): - def log(function): - """Logger that records which phase processes each token""" - type_names = dict((value, key) for key, value in - tokenTypes.items()) - - def wrapped(self, *args, **kwargs): - if function.__name__.startswith("process") and len(args) > 0: - token = args[0] - try: - info = {"type": type_names[token['type']]} - except: - raise - if token['type'] in tagTokenTypes: - info["name"] = token['name'] - - self.parser.log.append((self.parser.tokenizer.state.__name__, - self.parser.phase.__class__.__name__, - self.__class__.__name__, - function.__name__, - info)) - return function(self, *args, **kwargs) - else: - return function(self, *args, **kwargs) - return wrapped - - def getMetaclass(use_metaclass, metaclass_func): - if use_metaclass: - return method_decorator_metaclass(metaclass_func) - else: - return type - - # pylint:disable=unused-argument - class Phase(with_metaclass(getMetaclass(debug, log))): - """Base class for helper object that implements each phase of processing - """ - - def __init__(self, parser, tree): - self.parser = parser - self.tree = tree - - def processEOF(self): - raise NotImplementedError - - def processComment(self, token): - # For most phases the following is correct. Where it's not it will be - # overridden. - self.tree.insertComment(token, self.tree.openElements[-1]) - - def processDoctype(self, token): - self.parser.parseError("unexpected-doctype") - - def processCharacters(self, token): - self.tree.insertText(token["data"]) - - def processSpaceCharacters(self, token): - self.tree.insertText(token["data"]) - - def processStartTag(self, token): - return self.startTagHandler[token["name"]](token) - - def startTagHtml(self, token): - if not self.parser.firstStartTag and token["name"] == "html": - self.parser.parseError("non-html-root") - # XXX Need a check here to see if the first start tag token emitted is - # this token... If it's not, invoke self.parser.parseError(). 
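The attribute loop that follows is what makes a stray second html start tag harmless: any attribute not already present on the root element is copied over. An illustrative check (the attribute names are arbitrary, and the comment states what the loop above implies rather than verified output):

import html5lib

doc = html5lib.parse('<html lang="en"><html class="late">duplicate root</html>')
# lang comes from the first start tag; class is merged in from the second,
# which is also recorded as a "non-html-root" parse error.
print(doc.attrib)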
- for attr, value in token["data"].items(): - if attr not in self.tree.openElements[0].attributes: - self.tree.openElements[0].attributes[attr] = value - self.parser.firstStartTag = False - - def processEndTag(self, token): - return self.endTagHandler[token["name"]](token) - - class InitialPhase(Phase): - def processSpaceCharacters(self, token): - pass - - def processComment(self, token): - self.tree.insertComment(token, self.tree.document) - - def processDoctype(self, token): - name = token["name"] - publicId = token["publicId"] - systemId = token["systemId"] - correct = token["correct"] - - if (name != "html" or publicId is not None or - systemId is not None and systemId != "about:legacy-compat"): - self.parser.parseError("unknown-doctype") - - if publicId is None: - publicId = "" - - self.tree.insertDoctype(token) - - if publicId != "": - publicId = publicId.translate(asciiUpper2Lower) - - if (not correct or token["name"] != "html" or - publicId.startswith( - ("+//silmaril//dtd html pro v0r11 19970101//", - "-//advasoft ltd//dtd html 3.0 aswedit + extensions//", - "-//as//dtd html 3.0 aswedit + extensions//", - "-//ietf//dtd html 2.0 level 1//", - "-//ietf//dtd html 2.0 level 2//", - "-//ietf//dtd html 2.0 strict level 1//", - "-//ietf//dtd html 2.0 strict level 2//", - "-//ietf//dtd html 2.0 strict//", - "-//ietf//dtd html 2.0//", - "-//ietf//dtd html 2.1e//", - "-//ietf//dtd html 3.0//", - "-//ietf//dtd html 3.2 final//", - "-//ietf//dtd html 3.2//", - "-//ietf//dtd html 3//", - "-//ietf//dtd html level 0//", - "-//ietf//dtd html level 1//", - "-//ietf//dtd html level 2//", - "-//ietf//dtd html level 3//", - "-//ietf//dtd html strict level 0//", - "-//ietf//dtd html strict level 1//", - "-//ietf//dtd html strict level 2//", - "-//ietf//dtd html strict level 3//", - "-//ietf//dtd html strict//", - "-//ietf//dtd html//", - "-//metrius//dtd metrius presentational//", - "-//microsoft//dtd internet explorer 2.0 html strict//", - "-//microsoft//dtd internet explorer 2.0 html//", - "-//microsoft//dtd internet explorer 2.0 tables//", - "-//microsoft//dtd internet explorer 3.0 html strict//", - "-//microsoft//dtd internet explorer 3.0 html//", - "-//microsoft//dtd internet explorer 3.0 tables//", - "-//netscape comm. corp.//dtd html//", - "-//netscape comm. 
corp.//dtd strict html//", - "-//o'reilly and associates//dtd html 2.0//", - "-//o'reilly and associates//dtd html extended 1.0//", - "-//o'reilly and associates//dtd html extended relaxed 1.0//", - "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//", - "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//", - "-//spyglass//dtd html 2.0 extended//", - "-//sq//dtd html 2.0 hotmetal + extensions//", - "-//sun microsystems corp.//dtd hotjava html//", - "-//sun microsystems corp.//dtd hotjava strict html//", - "-//w3c//dtd html 3 1995-03-24//", - "-//w3c//dtd html 3.2 draft//", - "-//w3c//dtd html 3.2 final//", - "-//w3c//dtd html 3.2//", - "-//w3c//dtd html 3.2s draft//", - "-//w3c//dtd html 4.0 frameset//", - "-//w3c//dtd html 4.0 transitional//", - "-//w3c//dtd html experimental 19960712//", - "-//w3c//dtd html experimental 970421//", - "-//w3c//dtd w3 html//", - "-//w3o//dtd w3 html 3.0//", - "-//webtechs//dtd mozilla html 2.0//", - "-//webtechs//dtd mozilla html//")) or - publicId in ("-//w3o//dtd w3 html strict 3.0//en//", - "-/w3c/dtd html 4.0 transitional/en", - "html") or - publicId.startswith( - ("-//w3c//dtd html 4.01 frameset//", - "-//w3c//dtd html 4.01 transitional//")) and - systemId is None or - systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"): - self.parser.compatMode = "quirks" - elif (publicId.startswith( - ("-//w3c//dtd xhtml 1.0 frameset//", - "-//w3c//dtd xhtml 1.0 transitional//")) or - publicId.startswith( - ("-//w3c//dtd html 4.01 frameset//", - "-//w3c//dtd html 4.01 transitional//")) and - systemId is not None): - self.parser.compatMode = "limited quirks" - - self.parser.phase = self.parser.phases["beforeHtml"] - - def anythingElse(self): - self.parser.compatMode = "quirks" - self.parser.phase = self.parser.phases["beforeHtml"] - - def processCharacters(self, token): - self.parser.parseError("expected-doctype-but-got-chars") - self.anythingElse() - return token - - def processStartTag(self, token): - self.parser.parseError("expected-doctype-but-got-start-tag", - {"name": token["name"]}) - self.anythingElse() - return token - - def processEndTag(self, token): - self.parser.parseError("expected-doctype-but-got-end-tag", - {"name": token["name"]}) - self.anythingElse() - return token - - def processEOF(self): - self.parser.parseError("expected-doctype-but-got-eof") - self.anythingElse() - return True - - class BeforeHtmlPhase(Phase): - # helper methods - def insertHtmlElement(self): - self.tree.insertRoot(impliedTagToken("html", "StartTag")) - self.parser.phase = self.parser.phases["beforeHead"] - - # other - def processEOF(self): - self.insertHtmlElement() - return True - - def processComment(self, token): - self.tree.insertComment(token, self.tree.document) - - def processSpaceCharacters(self, token): - pass - - def processCharacters(self, token): - self.insertHtmlElement() - return token - - def processStartTag(self, token): - if token["name"] == "html": - self.parser.firstStartTag = True - self.insertHtmlElement() - return token - - def processEndTag(self, token): - if token["name"] not in ("head", "body", "html", "br"): - self.parser.parseError("unexpected-end-tag-before-html", - {"name": token["name"]}) - else: - self.insertHtmlElement() - return token - - class BeforeHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("head", self.startTagHead) - ]) 
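Every phase registers its handlers through _utils.MethodDispatcher as above and then assigns a .default fallback on the next line. A stripped-down stand-in for that pattern (not html5lib's class) to show what the lookup does:

class SimpleDispatcher(dict):
    # Tag names, or tuples of names, map to handlers; unknown names fall
    # back to .default, mirroring startTagHandler[token["name"]](token).
    def __init__(self, items):
        super().__init__()
        self.default = None
        for names, handler in items:
            for name in (names if isinstance(names, tuple) else (names,)):
                self[name] = handler

    def __missing__(self, name):
        return self.default


handlers = SimpleDispatcher([
    ("html", lambda token: "root"),
    (("head", "body", "br"), lambda token: "imply head"),
])
handlers.default = lambda token: "other"

print(handlers["body"]({"name": "body"}))  # imply head
print(handlers["p"]({"name": "p"}))        # other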
- self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - (("head", "body", "html", "br"), self.endTagImplyHead) - ]) - self.endTagHandler.default = self.endTagOther - - def processEOF(self): - self.startTagHead(impliedTagToken("head", "StartTag")) - return True - - def processSpaceCharacters(self, token): - pass - - def processCharacters(self, token): - self.startTagHead(impliedTagToken("head", "StartTag")) - return token - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagHead(self, token): - self.tree.insertElement(token) - self.tree.headPointer = self.tree.openElements[-1] - self.parser.phase = self.parser.phases["inHead"] - - def startTagOther(self, token): - self.startTagHead(impliedTagToken("head", "StartTag")) - return token - - def endTagImplyHead(self, token): - self.startTagHead(impliedTagToken("head", "StartTag")) - return token - - def endTagOther(self, token): - self.parser.parseError("end-tag-after-implied-root", - {"name": token["name"]}) - - class InHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("title", self.startTagTitle), - (("noframes", "style"), self.startTagNoFramesStyle), - ("noscript", self.startTagNoscript), - ("script", self.startTagScript), - (("base", "basefont", "bgsound", "command", "link"), - self.startTagBaseLinkCommand), - ("meta", self.startTagMeta), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("head", self.endTagHead), - (("br", "html", "body"), self.endTagHtmlBodyBr) - ]) - self.endTagHandler.default = self.endTagOther - - # the real thing - def processEOF(self): - self.anythingElse() - return True - - def processCharacters(self, token): - self.anythingElse() - return token - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagHead(self, token): - self.parser.parseError("two-heads-are-not-better-than-one") - - def startTagBaseLinkCommand(self, token): - self.tree.insertElement(token) - self.tree.openElements.pop() - token["selfClosingAcknowledged"] = True - - def startTagMeta(self, token): - self.tree.insertElement(token) - self.tree.openElements.pop() - token["selfClosingAcknowledged"] = True - - attributes = token["data"] - if self.parser.tokenizer.stream.charEncoding[1] == "tentative": - if "charset" in attributes: - self.parser.tokenizer.stream.changeEncoding(attributes["charset"]) - elif ("content" in attributes and - "http-equiv" in attributes and - attributes["http-equiv"].lower() == "content-type"): - # Encoding it as UTF-8 here is a hack, as really we should pass - # the abstract Unicode string, and just use the - # ContentAttrParser on that, but using UTF-8 allows all chars - # to be encoded and as a ASCII-superset works. 
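The comment above explains why the content attribute is re-encoded before being handed to ContentAttrParser. Purely as an illustration of what that tentative-encoding branch is extracting (this is not html5lib's parser), a naive version over the already-decoded string:

from typing import Optional


def charset_from_content(value: str) -> Optional[str]:
    # Look for a charset=... parameter in a Content-Type style value.
    for part in value.split(";"):
        name, _, rest = part.strip().partition("=")
        if name.strip().lower() == "charset":
            return rest.strip().strip("'\"").lower() or None
    return None


print(charset_from_content("text/html; charset=UTF-8"))  # utf-8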
- data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8")) - parser = _inputstream.ContentAttrParser(data) - codec = parser.parse() - self.parser.tokenizer.stream.changeEncoding(codec) - - def startTagTitle(self, token): - self.parser.parseRCDataRawtext(token, "RCDATA") - - def startTagNoFramesStyle(self, token): - # Need to decide whether to implement the scripting-disabled case - self.parser.parseRCDataRawtext(token, "RAWTEXT") - - def startTagNoscript(self, token): - if self.parser.scripting: - self.parser.parseRCDataRawtext(token, "RAWTEXT") - else: - self.tree.insertElement(token) - self.parser.phase = self.parser.phases["inHeadNoscript"] - - def startTagScript(self, token): - self.tree.insertElement(token) - self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState - self.parser.originalPhase = self.parser.phase - self.parser.phase = self.parser.phases["text"] - - def startTagOther(self, token): - self.anythingElse() - return token - - def endTagHead(self, token): - node = self.parser.tree.openElements.pop() - assert node.name == "head", "Expected head got %s" % node.name - self.parser.phase = self.parser.phases["afterHead"] - - def endTagHtmlBodyBr(self, token): - self.anythingElse() - return token - - def endTagOther(self, token): - self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) - - def anythingElse(self): - self.endTagHead(impliedTagToken("head")) - - class InHeadNoscriptPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand), - (("head", "noscript"), self.startTagHeadNoscript), - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("noscript", self.endTagNoscript), - ("br", self.endTagBr), - ]) - self.endTagHandler.default = self.endTagOther - - def processEOF(self): - self.parser.parseError("eof-in-head-noscript") - self.anythingElse() - return True - - def processComment(self, token): - return self.parser.phases["inHead"].processComment(token) - - def processCharacters(self, token): - self.parser.parseError("char-in-head-noscript") - self.anythingElse() - return token - - def processSpaceCharacters(self, token): - return self.parser.phases["inHead"].processSpaceCharacters(token) - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagBaseLinkCommand(self, token): - return self.parser.phases["inHead"].processStartTag(token) - - def startTagHeadNoscript(self, token): - self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) - - def startTagOther(self, token): - self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) - self.anythingElse() - return token - - def endTagNoscript(self, token): - node = self.parser.tree.openElements.pop() - assert node.name == "noscript", "Expected noscript got %s" % node.name - self.parser.phase = self.parser.phases["inHead"] - - def endTagBr(self, token): - self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) - self.anythingElse() - return token - - def endTagOther(self, token): - self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) - - def anythingElse(self): - # Caller must raise parse error first! 
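startTagNoscript above is where the parser's scripting flag (documented on parse() as treating noscript elements as if JavaScript were turned on) changes behaviour: with scripting on, the noscript content is tokenized as raw text; with it off, the content is parsed as normal elements. Illustrative calls, markup arbitrary:

import html5lib

markup = "<noscript><p>enable JavaScript</p></noscript>"

# scripting=True: the noscript element is parsed in RAWTEXT mode, so the
# inner markup stays as text.
with_js = html5lib.parse(markup, scripting=True)

# scripting=False (the default): the inner <p> becomes a real element.
without_js = html5lib.parse(markup, scripting=False)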
- self.endTagNoscript(impliedTagToken("noscript")) - - class AfterHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("body", self.startTagBody), - ("frameset", self.startTagFrameset), - (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", - "style", "title"), - self.startTagFromHead), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - self.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), - self.endTagHtmlBodyBr)]) - self.endTagHandler.default = self.endTagOther - - def processEOF(self): - self.anythingElse() - return True - - def processCharacters(self, token): - self.anythingElse() - return token - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagBody(self, token): - self.parser.framesetOK = False - self.tree.insertElement(token) - self.parser.phase = self.parser.phases["inBody"] - - def startTagFrameset(self, token): - self.tree.insertElement(token) - self.parser.phase = self.parser.phases["inFrameset"] - - def startTagFromHead(self, token): - self.parser.parseError("unexpected-start-tag-out-of-my-head", - {"name": token["name"]}) - self.tree.openElements.append(self.tree.headPointer) - self.parser.phases["inHead"].processStartTag(token) - for node in self.tree.openElements[::-1]: - if node.name == "head": - self.tree.openElements.remove(node) - break - - def startTagHead(self, token): - self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) - - def startTagOther(self, token): - self.anythingElse() - return token - - def endTagHtmlBodyBr(self, token): - self.anythingElse() - return token - - def endTagOther(self, token): - self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) - - def anythingElse(self): - self.tree.insertElement(impliedTagToken("body", "StartTag")) - self.parser.phase = self.parser.phases["inBody"] - self.parser.framesetOK = True - - class InBodyPhase(Phase): - # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody - # the really-really-really-very crazy mode - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - # Set this to the default handler - self.processSpaceCharacters = self.processSpaceCharactersNonPre - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("base", "basefont", "bgsound", "command", "link", "meta", - "script", "style", "title"), - self.startTagProcessInHead), - ("body", self.startTagBody), - ("frameset", self.startTagFrameset), - (("address", "article", "aside", "blockquote", "center", "details", - "dir", "div", "dl", "fieldset", "figcaption", "figure", - "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p", - "section", "summary", "ul"), - self.startTagCloseP), - (headingElements, self.startTagHeading), - (("pre", "listing"), self.startTagPreListing), - ("form", self.startTagForm), - (("li", "dd", "dt"), self.startTagListItem), - ("plaintext", self.startTagPlaintext), - ("a", self.startTagA), - (("b", "big", "code", "em", "font", "i", "s", "small", "strike", - "strong", "tt", "u"), self.startTagFormatting), - ("nobr", self.startTagNobr), - ("button", self.startTagButton), - (("applet", "marquee", "object"), self.startTagAppletMarqueeObject), - ("xmp", self.startTagXmp), - ("table", self.startTagTable), - (("area", "br", "embed", "img", "keygen", "wbr"), - 
self.startTagVoidFormatting), - (("param", "source", "track"), self.startTagParamSource), - ("input", self.startTagInput), - ("hr", self.startTagHr), - ("image", self.startTagImage), - ("isindex", self.startTagIsIndex), - ("textarea", self.startTagTextarea), - ("iframe", self.startTagIFrame), - ("noscript", self.startTagNoscript), - (("noembed", "noframes"), self.startTagRawtext), - ("select", self.startTagSelect), - (("rp", "rt"), self.startTagRpRt), - (("option", "optgroup"), self.startTagOpt), - (("math"), self.startTagMath), - (("svg"), self.startTagSvg), - (("caption", "col", "colgroup", "frame", "head", - "tbody", "td", "tfoot", "th", "thead", - "tr"), self.startTagMisplaced) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("body", self.endTagBody), - ("html", self.endTagHtml), - (("address", "article", "aside", "blockquote", "button", "center", - "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure", - "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre", - "section", "summary", "ul"), self.endTagBlock), - ("form", self.endTagForm), - ("p", self.endTagP), - (("dd", "dt", "li"), self.endTagListItem), - (headingElements, self.endTagHeading), - (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small", - "strike", "strong", "tt", "u"), self.endTagFormatting), - (("applet", "marquee", "object"), self.endTagAppletMarqueeObject), - ("br", self.endTagBr), - ]) - self.endTagHandler.default = self.endTagOther - - def isMatchingFormattingElement(self, node1, node2): - return (node1.name == node2.name and - node1.namespace == node2.namespace and - node1.attributes == node2.attributes) - - # helper - def addFormattingElement(self, token): - self.tree.insertElement(token) - element = self.tree.openElements[-1] - - matchingElements = [] - for node in self.tree.activeFormattingElements[::-1]: - if node is Marker: - break - elif self.isMatchingFormattingElement(node, element): - matchingElements.append(node) - - assert len(matchingElements) <= 3 - if len(matchingElements) == 3: - self.tree.activeFormattingElements.remove(matchingElements[-1]) - self.tree.activeFormattingElements.append(element) - - # the real deal - def processEOF(self): - allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td", - "tfoot", "th", "thead", "tr", "body", - "html")) - for node in self.tree.openElements[::-1]: - if node.name not in allowed_elements: - self.parser.parseError("expected-closing-tag-but-got-eof") - break - # Stop parsing - - def processSpaceCharactersDropNewline(self, token): - # Sometimes (start of
<pre>, <listing>, and <textarea> blocks) we
-        # want to drop leading newlines
-
-  The debugger caught an exception in your WSGI application. You can now
-  look at the traceback which led to the error.
-  If you enable JavaScript you can also use additional features such as code
-  execution (if the evalex feature is enabled), automatic pasting of the
-  exceptions and much more.
-"""
-    + FOOTER
-    + """
-"""
-)
-
-CONSOLE_HTML = (
-    HEADER
-    + """\
-Interactive Console
-In this console you can execute Python expressions in the context of the
-application. The initial namespace was created by the debugger automatically.
-The Console requires JavaScript.
-"""
-    + FOOTER
-)
-
-SUMMARY_HTML = """\
-  %(title)s
-  %(frames)s
-  %(description)s
-"""
-
-FRAME_HTML = """\
-  File "%(filename)s",
-      line %(lineno)s,
-      in %(function_name)s
-  %(lines)s
    -""" - -SOURCE_LINE_HTML = """\ - - %(lineno)s - %(code)s - -""" - - -def render_console_html(secret: str, evalex_trusted: bool = True) -> str: - return CONSOLE_HTML % { - "evalex": "true", - "evalex_trusted": "true" if evalex_trusted else "false", - "console": "true", - "title": "Console", - "secret": secret, - "traceback_id": -1, - } - - -def get_current_traceback( - ignore_system_exceptions: bool = False, - show_hidden_frames: bool = False, - skip: int = 0, -) -> "Traceback": - """Get the current exception info as `Traceback` object. Per default - calling this method will reraise system exceptions such as generator exit, - system exit or others. This behavior can be disabled by passing `False` - to the function as first parameter. - """ - info = t.cast( - t.Tuple[t.Type[BaseException], BaseException, TracebackType], sys.exc_info() - ) - exc_type, exc_value, tb = info - - if ignore_system_exceptions and exc_type in { - SystemExit, - KeyboardInterrupt, - GeneratorExit, - }: - raise - for _ in range(skip): - if tb.tb_next is None: - break - tb = tb.tb_next - tb = Traceback(exc_type, exc_value, tb) - if not show_hidden_frames: - tb.filter_hidden_frames() - return tb - - -class Line: - """Helper for the source renderer.""" - - __slots__ = ("lineno", "code", "in_frame", "current") - - def __init__(self, lineno: int, code: str) -> None: - self.lineno = lineno - self.code = code - self.in_frame = False - self.current = False - - @property - def classes(self) -> t.List[str]: - rv = ["line"] - if self.in_frame: - rv.append("in-frame") - if self.current: - rv.append("current") - return rv - - def render(self) -> str: - return SOURCE_LINE_HTML % { - "classes": " ".join(self.classes), - "lineno": self.lineno, - "code": escape(self.code), - } - - -class Traceback: - """Wraps a traceback.""" - - def __init__( - self, - exc_type: t.Type[BaseException], - exc_value: BaseException, - tb: TracebackType, - ) -> None: - self.exc_type = exc_type - self.exc_value = exc_value - self.tb = tb - - exception_type = exc_type.__name__ - if exc_type.__module__ not in {"builtins", "__builtin__", "exceptions"}: - exception_type = f"{exc_type.__module__}.{exception_type}" - self.exception_type = exception_type - - self.groups = [] - memo = set() - while True: - self.groups.append(Group(exc_type, exc_value, tb)) - memo.add(id(exc_value)) - exc_value = exc_value.__cause__ or exc_value.__context__ # type: ignore - if exc_value is None or id(exc_value) in memo: - break - exc_type = type(exc_value) - tb = exc_value.__traceback__ # type: ignore - self.groups.reverse() - self.frames = [frame for group in self.groups for frame in group.frames] - - def filter_hidden_frames(self) -> None: - """Remove the frames according to the paste spec.""" - for group in self.groups: - group.filter_hidden_frames() - - self.frames[:] = [frame for group in self.groups for frame in group.frames] - - @property - def is_syntax_error(self) -> bool: - """Is it a syntax error?""" - return isinstance(self.exc_value, SyntaxError) - - @property - def exception(self) -> str: - """String representation of the final exception.""" - return self.groups[-1].exception - - def log(self, logfile: t.Optional[t.IO[str]] = None) -> None: - """Log the ASCII traceback into a file object.""" - if logfile is None: - logfile = sys.stderr - tb = f"{self.plaintext.rstrip()}\n" - logfile.write(tb) - - def render_summary(self, include_title: bool = True) -> str: - """Render the traceback for the interactive console.""" - title = "" - classes = ["traceback"] - if not 
self.frames: - classes.append("noframe-traceback") - frames = [] - else: - library_frames = sum(frame.is_library for frame in self.frames) - mark_lib = 0 < library_frames < len(self.frames) - frames = [group.render(mark_lib=mark_lib) for group in self.groups] - - if include_title: - if self.is_syntax_error: - title = "Syntax Error" - else: - title = "Traceback (most recent call last):" - - if self.is_syntax_error: - description = f"
<pre class=syntaxerror>{escape(self.exception)}</pre>
    "
-        else:
-            description = f"<blockquote>{escape(self.exception)}</blockquote>
    " - - return SUMMARY_HTML % { - "classes": " ".join(classes), - "title": f"

<h3>{title if title else ''}</h3>
    ", - "frames": "\n".join(frames), - "description": description, - } - - def render_full( - self, - evalex: bool = False, - secret: t.Optional[str] = None, - evalex_trusted: bool = True, - ) -> str: - """Render the Full HTML page with the traceback info.""" - exc = escape(self.exception) - return PAGE_HTML % { - "evalex": "true" if evalex else "false", - "evalex_trusted": "true" if evalex_trusted else "false", - "console": "false", - "title": exc, - "exception": exc, - "exception_type": escape(self.exception_type), - "summary": self.render_summary(include_title=False), - "plaintext": escape(self.plaintext), - "plaintext_cs": re.sub("-{2,}", "-", self.plaintext), - "traceback_id": self.id, - "secret": secret, - } - - @cached_property - def plaintext(self) -> str: - return "\n".join([group.render_text() for group in self.groups]) - - @property - def id(self) -> int: - return id(self) - - -class Group: - """A group of frames for an exception in a traceback. If the - exception has a ``__cause__`` or ``__context__``, there are multiple - exception groups. - """ - - def __init__( - self, - exc_type: t.Type[BaseException], - exc_value: BaseException, - tb: TracebackType, - ) -> None: - self.exc_type = exc_type - self.exc_value = exc_value - self.info = None - if exc_value.__cause__ is not None: - self.info = ( - "The above exception was the direct cause of the following exception" - ) - elif exc_value.__context__ is not None: - self.info = ( - "During handling of the above exception, another exception occurred" - ) - - self.frames = [] - while tb is not None: - self.frames.append(Frame(exc_type, exc_value, tb)) - tb = tb.tb_next # type: ignore - - def filter_hidden_frames(self) -> None: - # An exception may not have a traceback to filter frames, such - # as one re-raised from ProcessPoolExecutor. - if not self.frames: - return - - new_frames: t.List[Frame] = [] - hidden = False - - for frame in self.frames: - hide = frame.hide - if hide in ("before", "before_and_this"): - new_frames = [] - hidden = False - if hide == "before_and_this": - continue - elif hide in ("reset", "reset_and_this"): - hidden = False - if hide == "reset_and_this": - continue - elif hide in ("after", "after_and_this"): - hidden = True - if hide == "after_and_this": - continue - elif hide or hidden: - continue - new_frames.append(frame) - - # if we only have one frame and that frame is from the codeop - # module, remove it. - if len(new_frames) == 1 and self.frames[0].module == "codeop": - del self.frames[:] - - # if the last frame is missing something went terrible wrong :( - elif self.frames[-1] in new_frames: - self.frames[:] = new_frames - - @property - def exception(self) -> str: - """String representation of the exception.""" - buf = traceback.format_exception_only(self.exc_type, self.exc_value) - rv = "".join(buf).strip() - return _to_str(rv, "utf-8", "replace") - - def render(self, mark_lib: bool = True) -> str: - out = [] - if self.info is not None: - out.append(f'
<li><div class="exc-divider">{self.info}:</div>
    ') - for frame in self.frames: - title = f' title="{escape(frame.info)}"' if frame.info else "" - out.append(f"{frame.render(mark_lib=mark_lib)}") - return "\n".join(out) - - def render_text(self) -> str: - out = [] - if self.info is not None: - out.append(f"\n{self.info}:\n") - out.append("Traceback (most recent call last):") - for frame in self.frames: - out.append(frame.render_text()) - out.append(self.exception) - return "\n".join(out) - - -class Frame: - """A single frame in a traceback.""" - - def __init__( - self, - exc_type: t.Type[BaseException], - exc_value: BaseException, - tb: TracebackType, - ) -> None: - self.lineno = tb.tb_lineno - self.function_name = tb.tb_frame.f_code.co_name - self.locals = tb.tb_frame.f_locals - self.globals = tb.tb_frame.f_globals - - fn = inspect.getsourcefile(tb) or inspect.getfile(tb) - if fn[-4:] in (".pyo", ".pyc"): - fn = fn[:-1] - # if it's a file on the file system resolve the real filename. - if os.path.isfile(fn): - fn = os.path.realpath(fn) - self.filename = _to_str(fn, get_filesystem_encoding()) - self.module = self.globals.get("__name__", self.locals.get("__name__")) - self.loader = self.globals.get("__loader__", self.locals.get("__loader__")) - self.code = tb.tb_frame.f_code - - # support for paste's traceback extensions - self.hide = self.locals.get("__traceback_hide__", False) - info = self.locals.get("__traceback_info__") - if info is not None: - info = _to_str(info, "utf-8", "replace") - self.info = info - - def render(self, mark_lib: bool = True) -> str: - """Render a single frame in a traceback.""" - return FRAME_HTML % { - "id": self.id, - "filename": escape(self.filename), - "lineno": self.lineno, - "function_name": escape(self.function_name), - "lines": self.render_line_context(), - "library": "library" if mark_lib and self.is_library else "", - } - - @cached_property - def is_library(self) -> bool: - return any( - self.filename.startswith(os.path.realpath(path)) - for path in sysconfig.get_paths().values() - ) - - def render_text(self) -> str: - return ( - f' File "{self.filename}", line {self.lineno}, in {self.function_name}\n' - f" {self.current_line.strip()}" - ) - - def render_line_context(self) -> str: - before, current, after = self.get_context_lines() - rv = [] - - def render_line(line: str, cls: str) -> None: - line = line.expandtabs().rstrip() - stripped_line = line.strip() - prefix = len(line) - len(stripped_line) - rv.append( - f'
    {" " * prefix}'
    -                f"{escape(stripped_line) if stripped_line else ' '}
    " - ) - - for line in before: - render_line(line, "before") - render_line(current, "current") - for line in after: - render_line(line, "after") - - return "\n".join(rv) - - def get_annotated_lines(self) -> t.List[Line]: - """Helper function that returns lines with extra information.""" - lines = [Line(idx + 1, x) for idx, x in enumerate(self.sourcelines)] - - # find function definition and mark lines - if hasattr(self.code, "co_firstlineno"): - lineno = self.code.co_firstlineno - 1 - while lineno > 0: - if _funcdef_re.match(lines[lineno].code): - break - lineno -= 1 - try: - offset = len(inspect.getblock([f"{x.code}\n" for x in lines[lineno:]])) - except TokenError: - offset = 0 - for line in lines[lineno : lineno + offset]: - line.in_frame = True - - # mark current line - try: - lines[self.lineno - 1].current = True - except IndexError: - pass - - return lines - - def eval(self, code: t.Union[str, CodeType], mode: str = "single") -> t.Any: - """Evaluate code in the context of the frame.""" - if isinstance(code, str): - code = compile(code, "", mode) - return eval(code, self.globals, self.locals) - - @cached_property - def sourcelines(self) -> t.List[str]: - """The sourcecode of the file as list of strings.""" - # get sourcecode from loader or file - source = None - if self.loader is not None: - try: - if hasattr(self.loader, "get_source"): - source = self.loader.get_source(self.module) - elif hasattr(self.loader, "get_source_by_code"): - source = self.loader.get_source_by_code(self.code) - except Exception: - # we munch the exception so that we don't cause troubles - # if the loader is broken. - pass - - if source is None: - try: - with open(self.filename, mode="rb") as f: - source = f.read() - except OSError: - return [] - - # already str? return right away - if isinstance(source, str): - return source.splitlines() - - charset = "utf-8" - if source.startswith(codecs.BOM_UTF8): - source = source[3:] - else: - for idx, match in enumerate(_line_re.finditer(source)): - coding_match = _coding_re.search(match.group()) - if coding_match is not None: - charset = coding_match.group(1).decode("utf-8") - break - if idx > 1: - break - - # on broken cookies we fall back to utf-8 too - charset = _to_str(charset) - try: - codecs.lookup(charset) - except LookupError: - charset = "utf-8" - - return source.decode(charset, "replace").splitlines() - - def get_context_lines( - self, context: int = 5 - ) -> t.Tuple[t.List[str], str, t.List[str]]: - before = self.sourcelines[self.lineno - context - 1 : self.lineno - 1] - past = self.sourcelines[self.lineno : self.lineno + context] - return (before, self.current_line, past) - - @property - def current_line(self) -> str: - try: - return self.sourcelines[self.lineno - 1] - except IndexError: - return "" - - @cached_property - def console(self) -> Console: - return Console(self.globals, self.locals) - - @property - def id(self) -> int: - return id(self) diff --git a/venv/lib/python3.8/site-packages/werkzeug/exceptions.py b/venv/lib/python3.8/site-packages/werkzeug/exceptions.py deleted file mode 100644 index 16c3964..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/exceptions.py +++ /dev/null @@ -1,943 +0,0 @@ -"""Implements a number of Python exceptions which can be raised from within -a view to trigger a standard HTTP non-200 response. - -Usage Example -------------- - -.. 
code-block:: python - - from werkzeug.wrappers.request import Request - from werkzeug.exceptions import HTTPException, NotFound - - def view(request): - raise NotFound() - - @Request.application - def application(request): - try: - return view(request) - except HTTPException as e: - return e - -As you can see from this example those exceptions are callable WSGI -applications. However, they are not Werkzeug response objects. You -can get a response object by calling ``get_response()`` on a HTTP -exception. - -Keep in mind that you may have to pass an environ (WSGI) or scope -(ASGI) to ``get_response()`` because some errors fetch additional -information relating to the request. - -If you want to hook in a different exception page to say, a 404 status -code, you can add a second except for a specific subclass of an error: - -.. code-block:: python - - @Request.application - def application(request): - try: - return view(request) - except NotFound as e: - return not_found(request) - except HTTPException as e: - return e - -""" -import sys -import typing as t -import warnings -from datetime import datetime -from html import escape - -from ._internal import _get_environ - -if t.TYPE_CHECKING: - import typing_extensions as te - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIEnvironment - from .datastructures import WWWAuthenticate - from .sansio.response import Response - from .wrappers.response import Response as WSGIResponse # noqa: F401 - - -class HTTPException(Exception): - """The base class for all HTTP exceptions. This exception can be called as a WSGI - application to render a default error page or you can catch the subclasses - of it independently and render nicer error messages. - """ - - code: t.Optional[int] = None - description: t.Optional[str] = None - - def __init__( - self, - description: t.Optional[str] = None, - response: t.Optional["Response"] = None, - ) -> None: - super().__init__() - if description is not None: - self.description = description - self.response = response - - @classmethod - def wrap( - cls, exception: t.Type[BaseException], name: t.Optional[str] = None - ) -> t.Type["HTTPException"]: - """Create an exception that is a subclass of the calling HTTP - exception and the ``exception`` argument. - - The first argument to the class will be passed to the - wrapped ``exception``, the rest to the HTTP exception. If - ``e.args`` is not empty and ``e.show_exception`` is ``True``, - the wrapped exception message is added to the HTTP error - description. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Create a subclass manually - instead. - - .. versionchanged:: 0.15.5 - The ``show_exception`` attribute controls whether the - description includes the wrapped exception message. - - .. versionchanged:: 0.15.0 - The description includes the wrapped exception message. - """ - warnings.warn( - "'HTTPException.wrap' is deprecated and will be removed in" - " Werkzeug 2.1. 
Create a subclass manually instead.", - DeprecationWarning, - stacklevel=2, - ) - - class newcls(cls, exception): # type: ignore - _description = cls.description - show_exception = False - - def __init__( - self, arg: t.Optional[t.Any] = None, *args: t.Any, **kwargs: t.Any - ) -> None: - super().__init__(*args, **kwargs) - - if arg is None: - exception.__init__(self) - else: - exception.__init__(self, arg) - - @property - def description(self) -> str: - if self.show_exception: - return ( - f"{self._description}\n" - f"{exception.__name__}: {exception.__str__(self)}" - ) - - return self._description # type: ignore - - @description.setter - def description(self, value: str) -> None: - self._description = value - - newcls.__module__ = sys._getframe(1).f_globals["__name__"] - name = name or cls.__name__ + exception.__name__ - newcls.__name__ = newcls.__qualname__ = name - return newcls - - @property - def name(self) -> str: - """The status name.""" - from .http import HTTP_STATUS_CODES - - return HTTP_STATUS_CODES.get(self.code, "Unknown Error") # type: ignore - - def get_description( - self, - environ: t.Optional["WSGIEnvironment"] = None, - scope: t.Optional[dict] = None, - ) -> str: - """Get the description.""" - if self.description is None: - description = "" - elif not isinstance(self.description, str): - description = str(self.description) - else: - description = self.description - - description = escape(description).replace("\n", "
    ") - return f"

    {description}

    " - - def get_body( - self, - environ: t.Optional["WSGIEnvironment"] = None, - scope: t.Optional[dict] = None, - ) -> str: - """Get the HTML body.""" - return ( - '\n' - f"{self.code} {escape(self.name)}\n" - f"

    {escape(self.name)}

    \n" - f"{self.get_description(environ)}\n" - ) - - def get_headers( - self, - environ: t.Optional["WSGIEnvironment"] = None, - scope: t.Optional[dict] = None, - ) -> t.List[t.Tuple[str, str]]: - """Get a list of headers.""" - return [("Content-Type", "text/html; charset=utf-8")] - - def get_response( - self, - environ: t.Optional["WSGIEnvironment"] = None, - scope: t.Optional[dict] = None, - ) -> "Response": - """Get a response object. If one was passed to the exception - it's returned directly. - - :param environ: the optional environ for the request. This - can be used to modify the response depending - on how the request looked like. - :return: a :class:`Response` object or a subclass thereof. - """ - from .wrappers.response import Response as WSGIResponse # noqa: F811 - - if self.response is not None: - return self.response - if environ is not None: - environ = _get_environ(environ) - headers = self.get_headers(environ, scope) - return WSGIResponse(self.get_body(environ, scope), self.code, headers) - - def __call__( - self, environ: "WSGIEnvironment", start_response: "StartResponse" - ) -> t.Iterable[bytes]: - """Call the exception as WSGI application. - - :param environ: the WSGI environment. - :param start_response: the response callable provided by the WSGI - server. - """ - response = t.cast("WSGIResponse", self.get_response(environ)) - return response(environ, start_response) - - def __str__(self) -> str: - code = self.code if self.code is not None else "???" - return f"{code} {self.name}: {self.description}" - - def __repr__(self) -> str: - code = self.code if self.code is not None else "???" - return f"<{type(self).__name__} '{code}: {self.name}'>" - - -class BadRequest(HTTPException): - """*400* `Bad Request` - - Raise if the browser sends something to the application the application - or server cannot handle. - """ - - code = 400 - description = ( - "The browser (or proxy) sent a request that this server could " - "not understand." - ) - - -class BadRequestKeyError(BadRequest, KeyError): - """An exception that is used to signal both a :exc:`KeyError` and a - :exc:`BadRequest`. Used by many of the datastructures. - """ - - _description = BadRequest.description - #: Show the KeyError along with the HTTP error message in the - #: response. This should be disabled in production, but can be - #: useful in a debug mode. - show_exception = False - - def __init__(self, arg: t.Optional[str] = None, *args: t.Any, **kwargs: t.Any): - super().__init__(*args, **kwargs) - - if arg is None: - KeyError.__init__(self) - else: - KeyError.__init__(self, arg) - - @property # type: ignore - def description(self) -> str: # type: ignore - if self.show_exception: - return ( - f"{self._description}\n" - f"{KeyError.__name__}: {KeyError.__str__(self)}" - ) - - return self._description - - @description.setter - def description(self, value: str) -> None: - self._description = value - - -class ClientDisconnected(BadRequest): - """Internal exception that is raised if Werkzeug detects a disconnected - client. Since the client is already gone at that point attempting to - send the error message to the client might not work and might ultimately - result in another exception in the server. Mainly this is here so that - it is silenced by default as far as Werkzeug is concerned. - - Since disconnections cannot be reliably detected and are unspecified - by WSGI to a large extent this might or might not be raised if a client - is gone. - - .. 
versionadded:: 0.8 - """ - - -class SecurityError(BadRequest): - """Raised if something triggers a security error. This is otherwise - exactly like a bad request error. - - .. versionadded:: 0.9 - """ - - -class BadHost(BadRequest): - """Raised if the submitted host is badly formatted. - - .. versionadded:: 0.11.2 - """ - - -class Unauthorized(HTTPException): - """*401* ``Unauthorized`` - - Raise if the user is not authorized to access a resource. - - The ``www_authenticate`` argument should be used to set the - ``WWW-Authenticate`` header. This is used for HTTP basic auth and - other schemes. Use :class:`~werkzeug.datastructures.WWWAuthenticate` - to create correctly formatted values. Strictly speaking a 401 - response is invalid if it doesn't provide at least one value for - this header, although real clients typically don't care. - - :param description: Override the default message used for the body - of the response. - :param www-authenticate: A single value, or list of values, for the - WWW-Authenticate header(s). - - .. versionchanged:: 2.0 - Serialize multiple ``www_authenticate`` items into multiple - ``WWW-Authenticate`` headers, rather than joining them - into a single value, for better interoperability. - - .. versionchanged:: 0.15.3 - If the ``www_authenticate`` argument is not set, the - ``WWW-Authenticate`` header is not set. - - .. versionchanged:: 0.15.3 - The ``response`` argument was restored. - - .. versionchanged:: 0.15.1 - ``description`` was moved back as the first argument, restoring - its previous position. - - .. versionchanged:: 0.15.0 - ``www_authenticate`` was added as the first argument, ahead of - ``description``. - """ - - code = 401 - description = ( - "The server could not verify that you are authorized to access" - " the URL requested. You either supplied the wrong credentials" - " (e.g. a bad password), or your browser doesn't understand" - " how to supply the credentials required." - ) - - def __init__( - self, - description: t.Optional[str] = None, - response: t.Optional["Response"] = None, - www_authenticate: t.Optional[ - t.Union["WWWAuthenticate", t.Iterable["WWWAuthenticate"]] - ] = None, - ) -> None: - super().__init__(description, response) - - from .datastructures import WWWAuthenticate - - if isinstance(www_authenticate, WWWAuthenticate): - www_authenticate = (www_authenticate,) - - self.www_authenticate = www_authenticate - - def get_headers( - self, - environ: t.Optional["WSGIEnvironment"] = None, - scope: t.Optional[dict] = None, - ) -> t.List[t.Tuple[str, str]]: - headers = super().get_headers(environ, scope) - if self.www_authenticate: - headers.extend(("WWW-Authenticate", str(x)) for x in self.www_authenticate) - return headers - - -class Forbidden(HTTPException): - """*403* `Forbidden` - - Raise if the user doesn't have the permission for the requested resource - but was authenticated. - """ - - code = 403 - description = ( - "You don't have the permission to access the requested" - " resource. It is either read-protected or not readable by the" - " server." - ) - - -class NotFound(HTTPException): - """*404* `Not Found` - - Raise if a resource does not exist and never existed. - """ - - code = 404 - description = ( - "The requested URL was not found on the server. If you entered" - " the URL manually please check your spelling and try again." - ) - - -class MethodNotAllowed(HTTPException): - """*405* `Method Not Allowed` - - Raise if the server used a method the resource does not handle. For - example `POST` if the resource is view only. 
Especially useful for REST. - - The first argument for this exception should be a list of allowed methods. - Strictly speaking the response would be invalid if you don't provide valid - methods in the header which you can do with that list. - """ - - code = 405 - description = "The method is not allowed for the requested URL." - - def __init__( - self, - valid_methods: t.Optional[t.Iterable[str]] = None, - description: t.Optional[str] = None, - response: t.Optional["Response"] = None, - ) -> None: - """Takes an optional list of valid http methods - starting with werkzeug 0.3 the list will be mandatory.""" - super().__init__(description=description, response=response) - self.valid_methods = valid_methods - - def get_headers( - self, - environ: t.Optional["WSGIEnvironment"] = None, - scope: t.Optional[dict] = None, - ) -> t.List[t.Tuple[str, str]]: - headers = super().get_headers(environ, scope) - if self.valid_methods: - headers.append(("Allow", ", ".join(self.valid_methods))) - return headers - - -class NotAcceptable(HTTPException): - """*406* `Not Acceptable` - - Raise if the server can't return any content conforming to the - `Accept` headers of the client. - """ - - code = 406 - description = ( - "The resource identified by the request is only capable of" - " generating response entities which have content" - " characteristics not acceptable according to the accept" - " headers sent in the request." - ) - - -class RequestTimeout(HTTPException): - """*408* `Request Timeout` - - Raise to signalize a timeout. - """ - - code = 408 - description = ( - "The server closed the network connection because the browser" - " didn't finish the request within the specified time." - ) - - -class Conflict(HTTPException): - """*409* `Conflict` - - Raise to signal that a request cannot be completed because it conflicts - with the current state on the server. - - .. versionadded:: 0.7 - """ - - code = 409 - description = ( - "A conflict happened while processing the request. The" - " resource might have been modified while the request was being" - " processed." - ) - - -class Gone(HTTPException): - """*410* `Gone` - - Raise if a resource existed previously and went away without new location. - """ - - code = 410 - description = ( - "The requested URL is no longer available on this server and" - " there is no forwarding address. If you followed a link from a" - " foreign page, please contact the author of this page." - ) - - -class LengthRequired(HTTPException): - """*411* `Length Required` - - Raise if the browser submitted data but no ``Content-Length`` header which - is required for the kind of processing the server does. - """ - - code = 411 - description = ( - "A request with this method requires a valid Content-" - "Length header." - ) - - -class PreconditionFailed(HTTPException): - """*412* `Precondition Failed` - - Status code used in combination with ``If-Match``, ``If-None-Match``, or - ``If-Unmodified-Since``. - """ - - code = 412 - description = ( - "The precondition on the request for the URL failed positive evaluation." - ) - - -class RequestEntityTooLarge(HTTPException): - """*413* `Request Entity Too Large` - - The status code one should return if the data submitted exceeded a given - limit. - """ - - code = 413 - description = "The data value transmitted exceeds the capacity limit." - - -class RequestURITooLarge(HTTPException): - """*414* `Request URI Too Large` - - Like *413* but for too long URLs. 
- """ - - code = 414 - description = ( - "The length of the requested URL exceeds the capacity limit for" - " this server. The request cannot be processed." - ) - - -class UnsupportedMediaType(HTTPException): - """*415* `Unsupported Media Type` - - The status code returned if the server is unable to handle the media type - the client transmitted. - """ - - code = 415 - description = ( - "The server does not support the media type transmitted in the request." - ) - - -class RequestedRangeNotSatisfiable(HTTPException): - """*416* `Requested Range Not Satisfiable` - - The client asked for an invalid part of the file. - - .. versionadded:: 0.7 - """ - - code = 416 - description = "The server cannot provide the requested range." - - def __init__( - self, - length: t.Optional[int] = None, - units: str = "bytes", - description: t.Optional[str] = None, - response: t.Optional["Response"] = None, - ) -> None: - """Takes an optional `Content-Range` header value based on ``length`` - parameter. - """ - super().__init__(description=description, response=response) - self.length = length - self.units = units - - def get_headers( - self, - environ: t.Optional["WSGIEnvironment"] = None, - scope: t.Optional[dict] = None, - ) -> t.List[t.Tuple[str, str]]: - headers = super().get_headers(environ, scope) - if self.length is not None: - headers.append(("Content-Range", f"{self.units} */{self.length}")) - return headers - - -class ExpectationFailed(HTTPException): - """*417* `Expectation Failed` - - The server cannot meet the requirements of the Expect request-header. - - .. versionadded:: 0.7 - """ - - code = 417 - description = "The server could not meet the requirements of the Expect header" - - -class ImATeapot(HTTPException): - """*418* `I'm a teapot` - - The server should return this if it is a teapot and someone attempted - to brew coffee with it. - - .. versionadded:: 0.7 - """ - - code = 418 - description = "This server is a teapot, not a coffee machine" - - -class UnprocessableEntity(HTTPException): - """*422* `Unprocessable Entity` - - Used if the request is well formed, but the instructions are otherwise - incorrect. - """ - - code = 422 - description = ( - "The request was well-formed but was unable to be followed due" - " to semantic errors." - ) - - -class Locked(HTTPException): - """*423* `Locked` - - Used if the resource that is being accessed is locked. - """ - - code = 423 - description = "The resource that is being accessed is locked." - - -class FailedDependency(HTTPException): - """*424* `Failed Dependency` - - Used if the method could not be performed on the resource - because the requested action depended on another action and that action failed. - """ - - code = 424 - description = ( - "The method could not be performed on the resource because the" - " requested action depended on another action and that action" - " failed." - ) - - -class PreconditionRequired(HTTPException): - """*428* `Precondition Required` - - The server requires this request to be conditional, typically to prevent - the lost update problem, which is a race condition between two or more - clients attempting to update a resource through PUT or DELETE. By requiring - each client to include a conditional header ("If-Match" or "If-Unmodified- - Since") with the proper value retained from a recent GET request, the - server ensures that each client has at least seen the previous revision of - the resource. 
- """ - - code = 428 - description = ( - "This request is required to be conditional; try using" - ' "If-Match" or "If-Unmodified-Since".' - ) - - -class _RetryAfter(HTTPException): - """Adds an optional ``retry_after`` parameter which will set the - ``Retry-After`` header. May be an :class:`int` number of seconds or - a :class:`~datetime.datetime`. - """ - - def __init__( - self, - description: t.Optional[str] = None, - response: t.Optional["Response"] = None, - retry_after: t.Optional[t.Union[datetime, int]] = None, - ) -> None: - super().__init__(description, response) - self.retry_after = retry_after - - def get_headers( - self, - environ: t.Optional["WSGIEnvironment"] = None, - scope: t.Optional[dict] = None, - ) -> t.List[t.Tuple[str, str]]: - headers = super().get_headers(environ, scope) - - if self.retry_after: - if isinstance(self.retry_after, datetime): - from .http import http_date - - value = http_date(self.retry_after) - else: - value = str(self.retry_after) - - headers.append(("Retry-After", value)) - - return headers - - -class TooManyRequests(_RetryAfter): - """*429* `Too Many Requests` - - The server is limiting the rate at which this user receives - responses, and this request exceeds that rate. (The server may use - any convenient method to identify users and their request rates). - The server may include a "Retry-After" header to indicate how long - the user should wait before retrying. - - :param retry_after: If given, set the ``Retry-After`` header to this - value. May be an :class:`int` number of seconds or a - :class:`~datetime.datetime`. - - .. versionchanged:: 1.0 - Added ``retry_after`` parameter. - """ - - code = 429 - description = "This user has exceeded an allotted request count. Try again later." - - -class RequestHeaderFieldsTooLarge(HTTPException): - """*431* `Request Header Fields Too Large` - - The server refuses to process the request because the header fields are too - large. One or more individual fields may be too large, or the set of all - headers is too large. - """ - - code = 431 - description = "One or more header fields exceeds the maximum size." - - -class UnavailableForLegalReasons(HTTPException): - """*451* `Unavailable For Legal Reasons` - - This status code indicates that the server is denying access to the - resource as a consequence of a legal demand. - """ - - code = 451 - description = "Unavailable for legal reasons." - - -class InternalServerError(HTTPException): - """*500* `Internal Server Error` - - Raise if an internal server error occurred. This is a good fallback if an - unknown error occurred in the dispatcher. - - .. versionchanged:: 1.0.0 - Added the :attr:`original_exception` attribute. - """ - - code = 500 - description = ( - "The server encountered an internal error and was unable to" - " complete your request. Either the server is overloaded or" - " there is an error in the application." - ) - - def __init__( - self, - description: t.Optional[str] = None, - response: t.Optional["Response"] = None, - original_exception: t.Optional[BaseException] = None, - ) -> None: - #: The original exception that caused this 500 error. Can be - #: used by frameworks to provide context when handling - #: unexpected errors. - self.original_exception = original_exception - super().__init__(description=description, response=response) - - -class NotImplemented(HTTPException): - """*501* `Not Implemented` - - Raise if the application does not support the action requested by the - browser. 
- """ - - code = 501 - description = "The server does not support the action requested by the browser." - - -class BadGateway(HTTPException): - """*502* `Bad Gateway` - - If you do proxying in your application you should return this status code - if you received an invalid response from the upstream server it accessed - in attempting to fulfill the request. - """ - - code = 502 - description = ( - "The proxy server received an invalid response from an upstream server." - ) - - -class ServiceUnavailable(_RetryAfter): - """*503* `Service Unavailable` - - Status code you should return if a service is temporarily - unavailable. - - :param retry_after: If given, set the ``Retry-After`` header to this - value. May be an :class:`int` number of seconds or a - :class:`~datetime.datetime`. - - .. versionchanged:: 1.0 - Added ``retry_after`` parameter. - """ - - code = 503 - description = ( - "The server is temporarily unable to service your request due" - " to maintenance downtime or capacity problems. Please try" - " again later." - ) - - -class GatewayTimeout(HTTPException): - """*504* `Gateway Timeout` - - Status code you should return if a connection to an upstream server - times out. - """ - - code = 504 - description = "The connection to an upstream server timed out." - - -class HTTPVersionNotSupported(HTTPException): - """*505* `HTTP Version Not Supported` - - The server does not support the HTTP protocol version used in the request. - """ - - code = 505 - description = ( - "The server does not support the HTTP protocol version used in the request." - ) - - -default_exceptions: t.Dict[int, t.Type[HTTPException]] = {} - - -def _find_exceptions() -> None: - for obj in globals().values(): - try: - is_http_exception = issubclass(obj, HTTPException) - except TypeError: - is_http_exception = False - if not is_http_exception or obj.code is None: - continue - old_obj = default_exceptions.get(obj.code, None) - if old_obj is not None and issubclass(obj, old_obj): - continue - default_exceptions[obj.code] = obj - - -_find_exceptions() -del _find_exceptions - - -class Aborter: - """When passed a dict of code -> exception items it can be used as - callable that raises exceptions. If the first argument to the - callable is an integer it will be looked up in the mapping, if it's - a WSGI application it will be raised in a proxy exception. - - The rest of the arguments are forwarded to the exception constructor. - """ - - def __init__( - self, - mapping: t.Optional[t.Dict[int, t.Type[HTTPException]]] = None, - extra: t.Optional[t.Dict[int, t.Type[HTTPException]]] = None, - ) -> None: - if mapping is None: - mapping = default_exceptions - self.mapping = dict(mapping) - if extra is not None: - self.mapping.update(extra) - - def __call__( - self, code: t.Union[int, "Response"], *args: t.Any, **kwargs: t.Any - ) -> "te.NoReturn": - from .sansio.response import Response - - if isinstance(code, Response): - raise HTTPException(response=code) - - if code not in self.mapping: - raise LookupError(f"no exception for {code!r}") - - raise self.mapping[code](*args, **kwargs) - - -def abort( - status: t.Union[int, "Response"], *args: t.Any, **kwargs: t.Any -) -> "te.NoReturn": - """Raises an :py:exc:`HTTPException` for the given status code or WSGI - application. - - If a status code is given, it will be looked up in the list of - exceptions and will raise that exception. 
If passed a WSGI application, - it will wrap it in a proxy WSGI exception and raise that:: - - abort(404) # 404 Not Found - abort(Response('Hello World')) - - """ - _aborter(status, *args, **kwargs) - - -_aborter: Aborter = Aborter() diff --git a/venv/lib/python3.8/site-packages/werkzeug/filesystem.py b/venv/lib/python3.8/site-packages/werkzeug/filesystem.py deleted file mode 100644 index 36a3d12..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/filesystem.py +++ /dev/null @@ -1,55 +0,0 @@ -import codecs -import sys -import typing as t -import warnings - -# We do not trust traditional unixes. -has_likely_buggy_unicode_filesystem = ( - sys.platform.startswith("linux") or "bsd" in sys.platform -) - - -def _is_ascii_encoding(encoding: t.Optional[str]) -> bool: - """Given an encoding this figures out if the encoding is actually ASCII (which - is something we don't actually want in most cases). This is necessary - because ASCII comes under many names such as ANSI_X3.4-1968. - """ - if encoding is None: - return False - try: - return codecs.lookup(encoding).name == "ascii" - except LookupError: - return False - - -class BrokenFilesystemWarning(RuntimeWarning, UnicodeWarning): - """The warning used by Werkzeug to signal a broken filesystem. Will only be - used once per runtime.""" - - -_warned_about_filesystem_encoding = False - - -def get_filesystem_encoding() -> str: - """Returns the filesystem encoding that should be used. Note that this is - different from the Python understanding of the filesystem encoding which - might be deeply flawed. Do not use this value against Python's string APIs - because it might be different. See :ref:`filesystem-encoding` for the exact - behavior. - - The concept of a filesystem encoding in generally is not something you - should rely on. As such if you ever need to use this function except for - writing wrapper code reconsider. - """ - global _warned_about_filesystem_encoding - rv = sys.getfilesystemencoding() - if has_likely_buggy_unicode_filesystem and not rv or _is_ascii_encoding(rv): - if not _warned_about_filesystem_encoding: - warnings.warn( - "Detected a misconfigured UNIX filesystem: Will use" - f" UTF-8 as filesystem encoding instead of {rv!r}", - BrokenFilesystemWarning, - ) - _warned_about_filesystem_encoding = True - return "utf-8" - return rv diff --git a/venv/lib/python3.8/site-packages/werkzeug/formparser.py b/venv/lib/python3.8/site-packages/werkzeug/formparser.py deleted file mode 100644 index 6cb758f..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/formparser.py +++ /dev/null @@ -1,495 +0,0 @@ -import typing as t -import warnings -from functools import update_wrapper -from io import BytesIO -from itertools import chain -from typing import Union - -from . import exceptions -from ._internal import _to_str -from .datastructures import FileStorage -from .datastructures import Headers -from .datastructures import MultiDict -from .http import parse_options_header -from .sansio.multipart import Data -from .sansio.multipart import Epilogue -from .sansio.multipart import Field -from .sansio.multipart import File -from .sansio.multipart import MultipartDecoder -from .sansio.multipart import NeedData -from .urls import url_decode_stream -from .wsgi import _make_chunk_iter -from .wsgi import get_content_length -from .wsgi import get_input_stream - -# there are some platforms where SpooledTemporaryFile is not available. -# In that case we need to provide a fallback. 
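
# A minimal usage sketch, not part of the deleted module, of what the fallback
# above is guarding: default_stream_factory() (defined a little further down)
# buffers each form-data part in a SpooledTemporaryFile when that class is
# available (in memory up to max_size, then spilled to disk); otherwise it
# uses a TemporaryFile for large or unknown sizes and a BytesIO for small ones.
from werkzeug.formparser import default_stream_factory

part = default_stream_factory(
    total_content_length=100, content_type="text/plain", filename="a.txt"
)
part.write(b"hello")   # a small payload stays in memory
part.seek(0)
assert part.read() == b"hello"
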
-try: - from tempfile import SpooledTemporaryFile -except ImportError: - from tempfile import TemporaryFile - - SpooledTemporaryFile = None # type: ignore - -if t.TYPE_CHECKING: - import typing as te - from _typeshed.wsgi import WSGIEnvironment - - t_parse_result = t.Tuple[t.IO[bytes], MultiDict, MultiDict] - - class TStreamFactory(te.Protocol): - def __call__( - self, - total_content_length: t.Optional[int], - content_type: t.Optional[str], - filename: t.Optional[str], - content_length: t.Optional[int] = None, - ) -> t.IO[bytes]: - ... - - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - - -def _exhaust(stream: t.IO[bytes]) -> None: - bts = stream.read(64 * 1024) - while bts: - bts = stream.read(64 * 1024) - - -def default_stream_factory( - total_content_length: t.Optional[int], - content_type: t.Optional[str], - filename: t.Optional[str], - content_length: t.Optional[int] = None, -) -> t.IO[bytes]: - max_size = 1024 * 500 - - if SpooledTemporaryFile is not None: - return t.cast(t.IO[bytes], SpooledTemporaryFile(max_size=max_size, mode="rb+")) - elif total_content_length is None or total_content_length > max_size: - return t.cast(t.IO[bytes], TemporaryFile("rb+")) - - return BytesIO() - - -def parse_form_data( - environ: "WSGIEnvironment", - stream_factory: t.Optional["TStreamFactory"] = None, - charset: str = "utf-8", - errors: str = "replace", - max_form_memory_size: t.Optional[int] = None, - max_content_length: t.Optional[int] = None, - cls: t.Optional[t.Type[MultiDict]] = None, - silent: bool = True, -) -> "t_parse_result": - """Parse the form data in the environ and return it as tuple in the form - ``(stream, form, files)``. You should only call this method if the - transport method is `POST`, `PUT`, or `PATCH`. - - If the mimetype of the data transmitted is `multipart/form-data` the - files multidict will be filled with `FileStorage` objects. If the - mimetype is unknown the input stream is wrapped and returned as first - argument, else the stream is empty. - - This is a shortcut for the common usage of :class:`FormDataParser`. - - Have a look at :doc:`/request_data` for more details. - - .. versionadded:: 0.5 - The `max_form_memory_size`, `max_content_length` and - `cls` parameters were added. - - .. versionadded:: 0.5.1 - The optional `silent` flag was added. - - :param environ: the WSGI environment to be used for parsing. - :param stream_factory: An optional callable that returns a new read and - writeable file descriptor. This callable works - the same as :meth:`Response._get_file_stream`. - :param charset: The character set for URL and url encoded form data. - :param errors: The encoding error behavior. - :param max_form_memory_size: the maximum number of bytes to be accepted for - in-memory stored form data. If the data - exceeds the value specified an - :exc:`~exceptions.RequestEntityTooLarge` - exception is raised. - :param max_content_length: If this is provided and the transmitted data - is longer than this value an - :exc:`~exceptions.RequestEntityTooLarge` - exception is raised. - :param cls: an optional dict class to use. If this is not specified - or `None` the default :class:`MultiDict` is used. - :param silent: If set to False parsing errors will not be caught. - :return: A tuple in the form ``(stream, form, files)``. 
- """ - return FormDataParser( - stream_factory, - charset, - errors, - max_form_memory_size, - max_content_length, - cls, - silent, - ).parse_from_environ(environ) - - -def exhaust_stream(f: F) -> F: - """Helper decorator for methods that exhausts the stream on return.""" - - def wrapper(self, stream, *args, **kwargs): # type: ignore - try: - return f(self, stream, *args, **kwargs) - finally: - exhaust = getattr(stream, "exhaust", None) - - if exhaust is not None: - exhaust() - else: - while True: - chunk = stream.read(1024 * 64) - - if not chunk: - break - - return update_wrapper(t.cast(F, wrapper), f) - - -class FormDataParser: - """This class implements parsing of form data for Werkzeug. By itself - it can parse multipart and url encoded form data. It can be subclassed - and extended but for most mimetypes it is a better idea to use the - untouched stream and expose it as separate attributes on a request - object. - - .. versionadded:: 0.8 - - :param stream_factory: An optional callable that returns a new read and - writeable file descriptor. This callable works - the same as :meth:`Response._get_file_stream`. - :param charset: The character set for URL and url encoded form data. - :param errors: The encoding error behavior. - :param max_form_memory_size: the maximum number of bytes to be accepted for - in-memory stored form data. If the data - exceeds the value specified an - :exc:`~exceptions.RequestEntityTooLarge` - exception is raised. - :param max_content_length: If this is provided and the transmitted data - is longer than this value an - :exc:`~exceptions.RequestEntityTooLarge` - exception is raised. - :param cls: an optional dict class to use. If this is not specified - or `None` the default :class:`MultiDict` is used. - :param silent: If set to False parsing errors will not be caught. - """ - - def __init__( - self, - stream_factory: t.Optional["TStreamFactory"] = None, - charset: str = "utf-8", - errors: str = "replace", - max_form_memory_size: t.Optional[int] = None, - max_content_length: t.Optional[int] = None, - cls: t.Optional[t.Type[MultiDict]] = None, - silent: bool = True, - ) -> None: - if stream_factory is None: - stream_factory = default_stream_factory - - self.stream_factory = stream_factory - self.charset = charset - self.errors = errors - self.max_form_memory_size = max_form_memory_size - self.max_content_length = max_content_length - - if cls is None: - cls = MultiDict - - self.cls = cls - self.silent = silent - - def get_parse_func( - self, mimetype: str, options: t.Dict[str, str] - ) -> t.Optional[ - t.Callable[ - ["FormDataParser", t.IO[bytes], str, t.Optional[int], t.Dict[str, str]], - "t_parse_result", - ] - ]: - return self.parse_functions.get(mimetype) - - def parse_from_environ(self, environ: "WSGIEnvironment") -> "t_parse_result": - """Parses the information from the environment as form data. - - :param environ: the WSGI environment to be used for parsing. - :return: A tuple in the form ``(stream, form, files)``. - """ - content_type = environ.get("CONTENT_TYPE", "") - content_length = get_content_length(environ) - mimetype, options = parse_options_header(content_type) - return self.parse(get_input_stream(environ), mimetype, content_length, options) - - def parse( - self, - stream: t.IO[bytes], - mimetype: str, - content_length: t.Optional[int], - options: t.Optional[t.Dict[str, str]] = None, - ) -> "t_parse_result": - """Parses the information from the given stream, mimetype, - content length and mimetype parameters. 
- - :param stream: an input stream - :param mimetype: the mimetype of the data - :param content_length: the content length of the incoming data - :param options: optional mimetype parameters (used for - the multipart boundary for instance) - :return: A tuple in the form ``(stream, form, files)``. - """ - if ( - self.max_content_length is not None - and content_length is not None - and content_length > self.max_content_length - ): - # if the input stream is not exhausted, firefox reports Connection Reset - _exhaust(stream) - raise exceptions.RequestEntityTooLarge() - - if options is None: - options = {} - - parse_func = self.get_parse_func(mimetype, options) - - if parse_func is not None: - try: - return parse_func(self, stream, mimetype, content_length, options) - except ValueError: - if not self.silent: - raise - - return stream, self.cls(), self.cls() - - @exhaust_stream - def _parse_multipart( - self, - stream: t.IO[bytes], - mimetype: str, - content_length: t.Optional[int], - options: t.Dict[str, str], - ) -> "t_parse_result": - parser = MultiPartParser( - self.stream_factory, - self.charset, - self.errors, - max_form_memory_size=self.max_form_memory_size, - cls=self.cls, - ) - boundary = options.get("boundary", "").encode("ascii") - - if not boundary: - raise ValueError("Missing boundary") - - form, files = parser.parse(stream, boundary, content_length) - return stream, form, files - - @exhaust_stream - def _parse_urlencoded( - self, - stream: t.IO[bytes], - mimetype: str, - content_length: t.Optional[int], - options: t.Dict[str, str], - ) -> "t_parse_result": - if ( - self.max_form_memory_size is not None - and content_length is not None - and content_length > self.max_form_memory_size - ): - # if the input stream is not exhausted, firefox reports Connection Reset - _exhaust(stream) - raise exceptions.RequestEntityTooLarge() - - form = url_decode_stream(stream, self.charset, errors=self.errors, cls=self.cls) - return stream, form, self.cls() - - #: mapping of mimetypes to parsing functions - parse_functions: t.Dict[ - str, - t.Callable[ - ["FormDataParser", t.IO[bytes], str, t.Optional[int], t.Dict[str, str]], - "t_parse_result", - ], - ] = { - "multipart/form-data": _parse_multipart, - "application/x-www-form-urlencoded": _parse_urlencoded, - "application/x-url-encoded": _parse_urlencoded, - } - - -def _line_parse(line: str) -> t.Tuple[str, bool]: - """Removes line ending characters and returns a tuple (`stripped_line`, - `is_terminated`). - """ - if line[-2:] == "\r\n": - return line[:-2], True - - elif line[-1:] in {"\r", "\n"}: - return line[:-1], True - - return line, False - - -def parse_multipart_headers(iterable: t.Iterable[bytes]) -> Headers: - """Parses multipart headers from an iterable that yields lines (including - the trailing newline symbol). The iterable has to be newline terminated. - The iterable will stop at the line where the headers ended so it can be - further consumed. 
- :param iterable: iterable of strings that are newline terminated - """ - warnings.warn( - "'parse_multipart_headers' is deprecated and will be removed in" - " Werkzeug 2.1.", - DeprecationWarning, - stacklevel=2, - ) - result: t.List[t.Tuple[str, str]] = [] - - for b_line in iterable: - line = _to_str(b_line) - line, line_terminated = _line_parse(line) - - if not line_terminated: - raise ValueError("unexpected end of line in multipart header") - - if not line: - break - elif line[0] in " \t" and result: - key, value = result[-1] - result[-1] = (key, f"{value}\n {line[1:]}") - else: - parts = line.split(":", 1) - - if len(parts) == 2: - result.append((parts[0].strip(), parts[1].strip())) - - # we link the list to the headers, no need to create a copy, the - # list was not shared anyways. - return Headers(result) - - -class MultiPartParser: - def __init__( - self, - stream_factory: t.Optional["TStreamFactory"] = None, - charset: str = "utf-8", - errors: str = "replace", - max_form_memory_size: t.Optional[int] = None, - cls: t.Optional[t.Type[MultiDict]] = None, - buffer_size: int = 64 * 1024, - ) -> None: - self.charset = charset - self.errors = errors - self.max_form_memory_size = max_form_memory_size - - if stream_factory is None: - stream_factory = default_stream_factory - - self.stream_factory = stream_factory - - if cls is None: - cls = MultiDict - - self.cls = cls - - self.buffer_size = buffer_size - - def fail(self, message: str) -> "te.NoReturn": - raise ValueError(message) - - def get_part_charset(self, headers: Headers) -> str: - # Figure out input charset for current part - content_type = headers.get("content-type") - - if content_type: - mimetype, ct_params = parse_options_header(content_type) - return ct_params.get("charset", self.charset) - - return self.charset - - def start_file_streaming( - self, event: File, total_content_length: t.Optional[int] - ) -> t.IO[bytes]: - content_type = event.headers.get("content-type") - - try: - content_length = int(event.headers["content-length"]) - except (KeyError, ValueError): - content_length = 0 - - container = self.stream_factory( - total_content_length=total_content_length, - filename=event.filename, - content_type=content_type, - content_length=content_length, - ) - return container - - def parse( - self, stream: t.IO[bytes], boundary: bytes, content_length: t.Optional[int] - ) -> t.Tuple[MultiDict, MultiDict]: - container: t.Union[t.IO[bytes], t.List[bytes]] - _write: t.Callable[[bytes], t.Any] - - iterator = chain( - _make_chunk_iter( - stream, - limit=content_length, - buffer_size=self.buffer_size, - ), - [None], - ) - - parser = MultipartDecoder(boundary, self.max_form_memory_size) - - fields = [] - files = [] - - current_part: Union[Field, File] - for data in iterator: - parser.receive_data(data) - event = parser.next_event() - while not isinstance(event, (Epilogue, NeedData)): - if isinstance(event, Field): - current_part = event - container = [] - _write = container.append - elif isinstance(event, File): - current_part = event - container = self.start_file_streaming(event, content_length) - _write = container.write - elif isinstance(event, Data): - _write(event.data) - if not event.more_data: - if isinstance(current_part, Field): - value = b"".join(container).decode( - self.get_part_charset(current_part.headers), self.errors - ) - fields.append((current_part.name, value)) - else: - container = t.cast(t.IO[bytes], container) - container.seek(0) - files.append( - ( - current_part.name, - FileStorage( - container, - 
current_part.filename, - current_part.name, - headers=current_part.headers, - ), - ) - ) - - event = parser.next_event() - - return self.cls(fields), self.cls(files) diff --git a/venv/lib/python3.8/site-packages/werkzeug/http.py b/venv/lib/python3.8/site-packages/werkzeug/http.py deleted file mode 100644 index ca48fe2..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/http.py +++ /dev/null @@ -1,1388 +0,0 @@ -import base64 -import email.utils -import re -import typing -import typing as t -import warnings -from datetime import date -from datetime import datetime -from datetime import time -from datetime import timedelta -from datetime import timezone -from enum import Enum -from hashlib import sha1 -from time import mktime -from time import struct_time -from urllib.parse import unquote_to_bytes as _unquote -from urllib.request import parse_http_list as _parse_list_header - -from ._internal import _cookie_parse_impl -from ._internal import _cookie_quote -from ._internal import _make_cookie_domain -from ._internal import _to_bytes -from ._internal import _to_str -from ._internal import _wsgi_decoding_dance -from werkzeug._internal import _dt_as_utc - -if t.TYPE_CHECKING: - import typing_extensions as te - from _typeshed.wsgi import WSGIEnvironment - -# for explanation of "media-range", etc. see Sections 5.3.{1,2} of RFC 7231 -_accept_re = re.compile( - r""" - ( # media-range capturing-parenthesis - [^\s;,]+ # type/subtype - (?:[ \t]*;[ \t]* # ";" - (?: # parameter non-capturing-parenthesis - [^\s;,q][^\s;,]* # token that doesn't start with "q" - | # or - q[^\s;,=][^\s;,]* # token that is more than just "q" - ) - )* # zero or more parameters - ) # end of media-range - (?:[ \t]*;[ \t]*q= # weight is a "q" parameter - (\d*(?:\.\d+)?) # qvalue capturing-parentheses - [^,]* # "extension" accept params: who cares? - )? # accept params are optional - """, - re.VERBOSE, -) -_token_chars = frozenset( - "!#$%&'*+-.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ^_`abcdefghijklmnopqrstuvwxyz|~" -) -_etag_re = re.compile(r'([Ww]/)?(?:"(.*?)"|(.*?))(?:\s*,\s*|$)') -_option_header_piece_re = re.compile( - r""" - ;\s*,?\s* # newlines were replaced with commas - (?P - "[^"\\]*(?:\\.[^"\\]*)*" # quoted string - | - [^\s;,=*]+ # token - ) - (?:\*(?P\d+))? # *1, optional continuation index - \s* - (?: # optionally followed by =value - (?: # equals sign, possibly with encoding - \*\s*=\s* # * indicates extended notation - (?: # optional encoding - (?P[^\s]+?) - '(?P[^\s]*?)' - )? - | - =\s* # basic notation - ) - (?P - "[^"\\]*(?:\\.[^"\\]*)*" # quoted string - | - [^;,]+ # token - )? - )? 
- \s* - """, - flags=re.VERBOSE, -) -_option_header_start_mime_type = re.compile(r",\s*([^;,\s]+)([;,]\s*.+)?") -_entity_headers = frozenset( - [ - "allow", - "content-encoding", - "content-language", - "content-length", - "content-location", - "content-md5", - "content-range", - "content-type", - "expires", - "last-modified", - ] -) -_hop_by_hop_headers = frozenset( - [ - "connection", - "keep-alive", - "proxy-authenticate", - "proxy-authorization", - "te", - "trailer", - "transfer-encoding", - "upgrade", - ] -) -HTTP_STATUS_CODES = { - 100: "Continue", - 101: "Switching Protocols", - 102: "Processing", - 103: "Early Hints", # see RFC 8297 - 200: "OK", - 201: "Created", - 202: "Accepted", - 203: "Non Authoritative Information", - 204: "No Content", - 205: "Reset Content", - 206: "Partial Content", - 207: "Multi Status", - 208: "Already Reported", # see RFC 5842 - 226: "IM Used", # see RFC 3229 - 300: "Multiple Choices", - 301: "Moved Permanently", - 302: "Found", - 303: "See Other", - 304: "Not Modified", - 305: "Use Proxy", - 306: "Switch Proxy", # unused - 307: "Temporary Redirect", - 308: "Permanent Redirect", - 400: "Bad Request", - 401: "Unauthorized", - 402: "Payment Required", # unused - 403: "Forbidden", - 404: "Not Found", - 405: "Method Not Allowed", - 406: "Not Acceptable", - 407: "Proxy Authentication Required", - 408: "Request Timeout", - 409: "Conflict", - 410: "Gone", - 411: "Length Required", - 412: "Precondition Failed", - 413: "Request Entity Too Large", - 414: "Request URI Too Long", - 415: "Unsupported Media Type", - 416: "Requested Range Not Satisfiable", - 417: "Expectation Failed", - 418: "I'm a teapot", # see RFC 2324 - 421: "Misdirected Request", # see RFC 7540 - 422: "Unprocessable Entity", - 423: "Locked", - 424: "Failed Dependency", - 425: "Too Early", # see RFC 8470 - 426: "Upgrade Required", - 428: "Precondition Required", # see RFC 6585 - 429: "Too Many Requests", - 431: "Request Header Fields Too Large", - 449: "Retry With", # proprietary MS extension - 451: "Unavailable For Legal Reasons", - 500: "Internal Server Error", - 501: "Not Implemented", - 502: "Bad Gateway", - 503: "Service Unavailable", - 504: "Gateway Timeout", - 505: "HTTP Version Not Supported", - 506: "Variant Also Negotiates", # see RFC 2295 - 507: "Insufficient Storage", - 508: "Loop Detected", # see RFC 5842 - 510: "Not Extended", - 511: "Network Authentication Failed", -} - - -class COEP(Enum): - """Cross Origin Embedder Policies""" - - UNSAFE_NONE = "unsafe-none" - REQUIRE_CORP = "require-corp" - - -class COOP(Enum): - """Cross Origin Opener Policies""" - - UNSAFE_NONE = "unsafe-none" - SAME_ORIGIN_ALLOW_POPUPS = "same-origin-allow-popups" - SAME_ORIGIN = "same-origin" - - -def quote_header_value( - value: t.Union[str, int], extra_chars: str = "", allow_token: bool = True -) -> str: - """Quote a header value if necessary. - - .. versionadded:: 0.5 - - :param value: the value to quote. - :param extra_chars: a list of extra characters to skip quoting. - :param allow_token: if this is enabled token values are returned - unchanged. - """ - if isinstance(value, bytes): - value = value.decode("latin1") - value = str(value) - if allow_token: - token_chars = _token_chars | set(extra_chars) - if set(value).issubset(token_chars): - return value - value = value.replace("\\", "\\\\").replace('"', '\\"') - return f'"{value}"' - - -def unquote_header_value(value: str, is_filename: bool = False) -> str: - r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). 
- This does not use the real unquoting but what browsers are actually - using for quoting. - - .. versionadded:: 0.5 - - :param value: the header value to unquote. - :param is_filename: The value represents a filename or path. - """ - if value and value[0] == value[-1] == '"': - # this is not the real unquoting, but fixing this so that the - # RFC is met will result in bugs with internet explorer and - # probably some other browsers as well. IE for example is - # uploading files with "C:\foo\bar.txt" as filename - value = value[1:-1] - - # if this is a filename and the starting characters look like - # a UNC path, then just return the value without quotes. Using the - # replace sequence below on a UNC path has the effect of turning - # the leading double slash into a single slash and then - # _fix_ie_filename() doesn't work correctly. See #458. - if not is_filename or value[:2] != "\\\\": - return value.replace("\\\\", "\\").replace('\\"', '"') - return value - - -def dump_options_header( - header: t.Optional[str], options: t.Mapping[str, t.Optional[t.Union[str, int]]] -) -> str: - """The reverse function to :func:`parse_options_header`. - - :param header: the header to dump - :param options: a dict of options to append. - """ - segments = [] - if header is not None: - segments.append(header) - for key, value in options.items(): - if value is None: - segments.append(key) - else: - segments.append(f"{key}={quote_header_value(value)}") - return "; ".join(segments) - - -def dump_header( - iterable: t.Union[t.Dict[str, t.Union[str, int]], t.Iterable[str]], - allow_token: bool = True, -) -> str: - """Dump an HTTP header again. This is the reversal of - :func:`parse_list_header`, :func:`parse_set_header` and - :func:`parse_dict_header`. This also quotes strings that include an - equals sign unless you pass it as dict of key, value pairs. - - >>> dump_header({'foo': 'bar baz'}) - 'foo="bar baz"' - >>> dump_header(('foo', 'bar baz')) - 'foo, "bar baz"' - - :param iterable: the iterable or dict of values to quote. - :param allow_token: if set to `False` tokens as values are disallowed. - See :func:`quote_header_value` for more details. - """ - if isinstance(iterable, dict): - items = [] - for key, value in iterable.items(): - if value is None: - items.append(key) - else: - items.append( - f"{key}={quote_header_value(value, allow_token=allow_token)}" - ) - else: - items = [quote_header_value(x, allow_token=allow_token) for x in iterable] - return ", ".join(items) - - -def dump_csp_header(header: "ds.ContentSecurityPolicy") -> str: - """Dump a Content Security Policy header. - - These are structured into policies such as "default-src 'self'; - script-src 'self'". - - .. versionadded:: 1.0.0 - Support for Content Security Policy headers was added. - - """ - return "; ".join(f"{key} {value}" for key, value in header.items()) - - -def parse_list_header(value: str) -> t.List[str]: - """Parse lists as described by RFC 2068 Section 2. - - In particular, parse comma-separated lists where the elements of - the list may include quoted-strings. A quoted-string could - contain a comma. A non-quoted string could have quotes in the - middle. Quotes are removed automatically after parsing. - - It basically works like :func:`parse_set_header` just that items - may appear multiple times and case sensitivity is preserved. 
- - The return value is a standard :class:`list`: - - >>> parse_list_header('token, "quoted value"') - ['token', 'quoted value'] - - To create a header from the :class:`list` again, use the - :func:`dump_header` function. - - :param value: a string with a list header. - :return: :class:`list` - """ - result = [] - for item in _parse_list_header(value): - if item[:1] == item[-1:] == '"': - item = unquote_header_value(item[1:-1]) - result.append(item) - return result - - -def parse_dict_header(value: str, cls: t.Type[dict] = dict) -> t.Dict[str, str]: - """Parse lists of key, value pairs as described by RFC 2068 Section 2 and - convert them into a python dict (or any other mapping object created from - the type with a dict like interface provided by the `cls` argument): - - >>> d = parse_dict_header('foo="is a fish", bar="as well"') - >>> type(d) is dict - True - >>> sorted(d.items()) - [('bar', 'as well'), ('foo', 'is a fish')] - - If there is no value for a key it will be `None`: - - >>> parse_dict_header('key_without_value') - {'key_without_value': None} - - To create a header from the :class:`dict` again, use the - :func:`dump_header` function. - - .. versionchanged:: 0.9 - Added support for `cls` argument. - - :param value: a string with a dict header. - :param cls: callable to use for storage of parsed results. - :return: an instance of `cls` - """ - result = cls() - if isinstance(value, bytes): - value = value.decode("latin1") - for item in _parse_list_header(value): - if "=" not in item: - result[item] = None - continue - name, value = item.split("=", 1) - if value[:1] == value[-1:] == '"': - value = unquote_header_value(value[1:-1]) - result[name] = value - return result - - -@typing.overload -def parse_options_header( - value: t.Optional[str], multiple: "te.Literal[False]" = False -) -> t.Tuple[str, t.Dict[str, str]]: - ... - - -@typing.overload -def parse_options_header( - value: t.Optional[str], multiple: "te.Literal[True]" -) -> t.Tuple[t.Any, ...]: - ... - - -def parse_options_header( - value: t.Optional[str], multiple: bool = False -) -> t.Union[t.Tuple[str, t.Dict[str, str]], t.Tuple[t.Any, ...]]: - """Parse a ``Content-Type`` like header into a tuple with the content - type and the options: - - >>> parse_options_header('text/html; charset=utf8') - ('text/html', {'charset': 'utf8'}) - - This should not be used to parse ``Cache-Control`` like headers that use - a slightly different format. For these headers use the - :func:`parse_dict_header` function. - - .. versionchanged:: 0.15 - :rfc:`2231` parameter continuations are handled. - - .. versionadded:: 0.5 - - :param value: the header to parse. - :param multiple: Whether try to parse and return multiple MIME types - :return: (mimetype, options) or (mimetype, options, mimetype, options, …) - if multiple=True - """ - if not value: - return "", {} - - result: t.List[t.Any] = [] - - value = "," + value.replace("\n", ",") - while value: - match = _option_header_start_mime_type.match(value) - if not match: - break - result.append(match.group(1)) # mimetype - options: t.Dict[str, str] = {} - # Parse options - rest = match.group(2) - encoding: t.Optional[str] - continued_encoding: t.Optional[str] = None - while rest: - optmatch = _option_header_piece_re.match(rest) - if not optmatch: - break - option, count, encoding, language, option_value = optmatch.groups() - # Continuations don't have to supply the encoding after the - # first line. If we're in a continuation, track the current - # encoding to use for subsequent lines. 
Reset it when the - # continuation ends. - if not count: - continued_encoding = None - else: - if not encoding: - encoding = continued_encoding - continued_encoding = encoding - option = unquote_header_value(option) - if option_value is not None: - option_value = unquote_header_value(option_value, option == "filename") - if encoding is not None: - option_value = _unquote(option_value).decode(encoding) - if count: - # Continuations append to the existing value. For - # simplicity, this ignores the possibility of - # out-of-order indices, which shouldn't happen anyway. - options[option] = options.get(option, "") + option_value - else: - options[option] = option_value - rest = rest[optmatch.end() :] - result.append(options) - if multiple is False: - return tuple(result) - value = rest - - return tuple(result) if result else ("", {}) - - -_TAnyAccept = t.TypeVar("_TAnyAccept", bound="ds.Accept") - - -@typing.overload -def parse_accept_header(value: t.Optional[str]) -> "ds.Accept": - ... - - -@typing.overload -def parse_accept_header( - value: t.Optional[str], cls: t.Type[_TAnyAccept] -) -> _TAnyAccept: - ... - - -def parse_accept_header( - value: t.Optional[str], cls: t.Optional[t.Type[_TAnyAccept]] = None -) -> _TAnyAccept: - """Parses an HTTP Accept-* header. This does not implement a complete - valid algorithm but one that supports at least value and quality - extraction. - - Returns a new :class:`Accept` object (basically a list of ``(value, quality)`` - tuples sorted by the quality with some additional accessor methods). - - The second parameter can be a subclass of :class:`Accept` that is created - with the parsed values and returned. - - :param value: the accept header string to be parsed. - :param cls: the wrapper class for the return value (can be - :class:`Accept` or a subclass thereof) - :return: an instance of `cls`. - """ - if cls is None: - cls = t.cast(t.Type[_TAnyAccept], ds.Accept) - - if not value: - return cls(None) - - result = [] - for match in _accept_re.finditer(value): - quality_match = match.group(2) - if not quality_match: - quality: float = 1 - else: - quality = max(min(float(quality_match), 1), 0) - result.append((match.group(1), quality)) - return cls(result) - - -_TAnyCC = t.TypeVar("_TAnyCC", bound="ds._CacheControl") -_t_cc_update = t.Optional[t.Callable[[_TAnyCC], None]] - - -@typing.overload -def parse_cache_control_header( - value: t.Optional[str], on_update: _t_cc_update, cls: None = None -) -> "ds.RequestCacheControl": - ... - - -@typing.overload -def parse_cache_control_header( - value: t.Optional[str], on_update: _t_cc_update, cls: t.Type[_TAnyCC] -) -> _TAnyCC: - ... - - -def parse_cache_control_header( - value: t.Optional[str], - on_update: _t_cc_update = None, - cls: t.Optional[t.Type[_TAnyCC]] = None, -) -> _TAnyCC: - """Parse a cache control header. The RFC differs between response and - request cache control, this method does not. It's your responsibility - to not use the wrong control statements. - - .. versionadded:: 0.5 - The `cls` was added. If not specified an immutable - :class:`~werkzeug.datastructures.RequestCacheControl` is returned. - - :param value: a cache control header to be parsed. - :param on_update: an optional callable that is called every time a value - on the :class:`~werkzeug.datastructures.CacheControl` - object is changed. - :param cls: the class for the returned object. By default - :class:`~werkzeug.datastructures.RequestCacheControl` is used. - :return: a `cls` object. 
- """ - if cls is None: - cls = t.cast(t.Type[_TAnyCC], ds.RequestCacheControl) - - if not value: - return cls((), on_update) - - return cls(parse_dict_header(value), on_update) - - -_TAnyCSP = t.TypeVar("_TAnyCSP", bound="ds.ContentSecurityPolicy") -_t_csp_update = t.Optional[t.Callable[[_TAnyCSP], None]] - - -@typing.overload -def parse_csp_header( - value: t.Optional[str], on_update: _t_csp_update, cls: None = None -) -> "ds.ContentSecurityPolicy": - ... - - -@typing.overload -def parse_csp_header( - value: t.Optional[str], on_update: _t_csp_update, cls: t.Type[_TAnyCSP] -) -> _TAnyCSP: - ... - - -def parse_csp_header( - value: t.Optional[str], - on_update: _t_csp_update = None, - cls: t.Optional[t.Type[_TAnyCSP]] = None, -) -> _TAnyCSP: - """Parse a Content Security Policy header. - - .. versionadded:: 1.0.0 - Support for Content Security Policy headers was added. - - :param value: a csp header to be parsed. - :param on_update: an optional callable that is called every time a value - on the object is changed. - :param cls: the class for the returned object. By default - :class:`~werkzeug.datastructures.ContentSecurityPolicy` is used. - :return: a `cls` object. - """ - if cls is None: - cls = t.cast(t.Type[_TAnyCSP], ds.ContentSecurityPolicy) - - if value is None: - return cls((), on_update) - - items = [] - - for policy in value.split(";"): - policy = policy.strip() - - # Ignore badly formatted policies (no space) - if " " in policy: - directive, value = policy.strip().split(" ", 1) - items.append((directive.strip(), value.strip())) - - return cls(items, on_update) - - -def parse_set_header( - value: t.Optional[str], - on_update: t.Optional[t.Callable[["ds.HeaderSet"], None]] = None, -) -> "ds.HeaderSet": - """Parse a set-like header and return a - :class:`~werkzeug.datastructures.HeaderSet` object: - - >>> hs = parse_set_header('token, "quoted value"') - - The return value is an object that treats the items case-insensitively - and keeps the order of the items: - - >>> 'TOKEN' in hs - True - >>> hs.index('quoted value') - 1 - >>> hs - HeaderSet(['token', 'quoted value']) - - To create a header from the :class:`HeaderSet` again, use the - :func:`dump_header` function. - - :param value: a set header to be parsed. - :param on_update: an optional callable that is called every time a - value on the :class:`~werkzeug.datastructures.HeaderSet` - object is changed. - :return: a :class:`~werkzeug.datastructures.HeaderSet` - """ - if not value: - return ds.HeaderSet(None, on_update) - return ds.HeaderSet(parse_list_header(value), on_update) - - -def parse_authorization_header( - value: t.Optional[str], -) -> t.Optional["ds.Authorization"]: - """Parse an HTTP basic/digest authorization header transmitted by the web - browser. The return value is either `None` if the header was invalid or - not given, otherwise an :class:`~werkzeug.datastructures.Authorization` - object. - - :param value: the authorization header to parse. - :return: a :class:`~werkzeug.datastructures.Authorization` object or `None`. 
- """ - if not value: - return None - value = _wsgi_decoding_dance(value) - try: - auth_type, auth_info = value.split(None, 1) - auth_type = auth_type.lower() - except ValueError: - return None - if auth_type == "basic": - try: - username, password = base64.b64decode(auth_info).split(b":", 1) - except Exception: - return None - try: - return ds.Authorization( - "basic", - { - "username": _to_str(username, "utf-8"), - "password": _to_str(password, "utf-8"), - }, - ) - except UnicodeDecodeError: - return None - elif auth_type == "digest": - auth_map = parse_dict_header(auth_info) - for key in "username", "realm", "nonce", "uri", "response": - if key not in auth_map: - return None - if "qop" in auth_map: - if not auth_map.get("nc") or not auth_map.get("cnonce"): - return None - return ds.Authorization("digest", auth_map) - return None - - -def parse_www_authenticate_header( - value: t.Optional[str], - on_update: t.Optional[t.Callable[["ds.WWWAuthenticate"], None]] = None, -) -> "ds.WWWAuthenticate": - """Parse an HTTP WWW-Authenticate header into a - :class:`~werkzeug.datastructures.WWWAuthenticate` object. - - :param value: a WWW-Authenticate header to parse. - :param on_update: an optional callable that is called every time a value - on the :class:`~werkzeug.datastructures.WWWAuthenticate` - object is changed. - :return: a :class:`~werkzeug.datastructures.WWWAuthenticate` object. - """ - if not value: - return ds.WWWAuthenticate(on_update=on_update) - try: - auth_type, auth_info = value.split(None, 1) - auth_type = auth_type.lower() - except (ValueError, AttributeError): - return ds.WWWAuthenticate(value.strip().lower(), on_update=on_update) - return ds.WWWAuthenticate(auth_type, parse_dict_header(auth_info), on_update) - - -def parse_if_range_header(value: t.Optional[str]) -> "ds.IfRange": - """Parses an if-range header which can be an etag or a date. Returns - a :class:`~werkzeug.datastructures.IfRange` object. - - .. versionchanged:: 2.0 - If the value represents a datetime, it is timezone-aware. - - .. versionadded:: 0.7 - """ - if not value: - return ds.IfRange() - date = parse_date(value) - if date is not None: - return ds.IfRange(date=date) - # drop weakness information - return ds.IfRange(unquote_etag(value)[0]) - - -def parse_range_header( - value: t.Optional[str], make_inclusive: bool = True -) -> t.Optional["ds.Range"]: - """Parses a range header into a :class:`~werkzeug.datastructures.Range` - object. If the header is missing or malformed `None` is returned. - `ranges` is a list of ``(start, stop)`` tuples where the ranges are - non-inclusive. - - .. 
versionadded:: 0.7 - """ - if not value or "=" not in value: - return None - - ranges = [] - last_end = 0 - units, rng = value.split("=", 1) - units = units.strip().lower() - - for item in rng.split(","): - item = item.strip() - if "-" not in item: - return None - if item.startswith("-"): - if last_end < 0: - return None - try: - begin = int(item) - except ValueError: - return None - end = None - last_end = -1 - elif "-" in item: - begin_str, end_str = item.split("-", 1) - begin_str = begin_str.strip() - end_str = end_str.strip() - if not begin_str.isdigit(): - return None - begin = int(begin_str) - if begin < last_end or last_end < 0: - return None - if end_str: - if not end_str.isdigit(): - return None - end = int(end_str) + 1 - if begin >= end: - return None - else: - end = None - last_end = end if end is not None else -1 - ranges.append((begin, end)) - - return ds.Range(units, ranges) - - -def parse_content_range_header( - value: t.Optional[str], - on_update: t.Optional[t.Callable[["ds.ContentRange"], None]] = None, -) -> t.Optional["ds.ContentRange"]: - """Parses a range header into a - :class:`~werkzeug.datastructures.ContentRange` object or `None` if - parsing is not possible. - - .. versionadded:: 0.7 - - :param value: a content range header to be parsed. - :param on_update: an optional callable that is called every time a value - on the :class:`~werkzeug.datastructures.ContentRange` - object is changed. - """ - if value is None: - return None - try: - units, rangedef = (value or "").strip().split(None, 1) - except ValueError: - return None - - if "/" not in rangedef: - return None - rng, length_str = rangedef.split("/", 1) - if length_str == "*": - length = None - elif length_str.isdigit(): - length = int(length_str) - else: - return None - - if rng == "*": - return ds.ContentRange(units, None, None, length, on_update=on_update) - elif "-" not in rng: - return None - - start_str, stop_str = rng.split("-", 1) - try: - start = int(start_str) - stop = int(stop_str) + 1 - except ValueError: - return None - - if is_byte_range_valid(start, stop, length): - return ds.ContentRange(units, start, stop, length, on_update=on_update) - - return None - - -def quote_etag(etag: str, weak: bool = False) -> str: - """Quote an etag. - - :param etag: the etag to quote. - :param weak: set to `True` to tag it "weak". - """ - if '"' in etag: - raise ValueError("invalid etag") - etag = f'"{etag}"' - if weak: - etag = f"W/{etag}" - return etag - - -def unquote_etag( - etag: t.Optional[str], -) -> t.Union[t.Tuple[str, bool], t.Tuple[None, None]]: - """Unquote a single etag: - - >>> unquote_etag('W/"bar"') - ('bar', True) - >>> unquote_etag('"bar"') - ('bar', False) - - :param etag: the etag identifier to unquote. - :return: a ``(etag, weak)`` tuple. - """ - if not etag: - return None, None - etag = etag.strip() - weak = False - if etag.startswith(("W/", "w/")): - weak = True - etag = etag[2:] - if etag[:1] == etag[-1:] == '"': - etag = etag[1:-1] - return etag, weak - - -def parse_etags(value: t.Optional[str]) -> "ds.ETags": - """Parse an etag header. - - :param value: the tag header to parse - :return: an :class:`~werkzeug.datastructures.ETags` object. 
- """ - if not value: - return ds.ETags() - strong = [] - weak = [] - end = len(value) - pos = 0 - while pos < end: - match = _etag_re.match(value, pos) - if match is None: - break - is_weak, quoted, raw = match.groups() - if raw == "*": - return ds.ETags(star_tag=True) - elif quoted: - raw = quoted - if is_weak: - weak.append(raw) - else: - strong.append(raw) - pos = match.end() - return ds.ETags(strong, weak) - - -def generate_etag(data: bytes) -> str: - """Generate an etag for some data. - - .. versionchanged:: 2.0 - Use SHA-1. MD5 may not be available in some environments. - """ - return sha1(data).hexdigest() - - -def parse_date(value: t.Optional[str]) -> t.Optional[datetime]: - """Parse an :rfc:`2822` date into a timezone-aware - :class:`datetime.datetime` object, or ``None`` if parsing fails. - - This is a wrapper for :func:`email.utils.parsedate_to_datetime`. It - returns ``None`` if parsing fails instead of raising an exception, - and always returns a timezone-aware datetime object. If the string - doesn't have timezone information, it is assumed to be UTC. - - :param value: A string with a supported date format. - - .. versionchanged:: 2.0 - Return a timezone-aware datetime object. Use - ``email.utils.parsedate_to_datetime``. - """ - if value is None: - return None - - try: - dt = email.utils.parsedate_to_datetime(value) - except (TypeError, ValueError): - return None - - if dt.tzinfo is None: - return dt.replace(tzinfo=timezone.utc) - - return dt - - -def cookie_date( - expires: t.Optional[t.Union[datetime, date, int, float, struct_time]] = None -) -> str: - """Format a datetime object or timestamp into an :rfc:`2822` date - string for ``Set-Cookie expires``. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Use :func:`http_date` instead. - """ - warnings.warn( - "'cookie_date' is deprecated and will be removed in Werkzeug" - " 2.1. Use 'http_date' instead.", - DeprecationWarning, - stacklevel=2, - ) - return http_date(expires) - - -def http_date( - timestamp: t.Optional[t.Union[datetime, date, int, float, struct_time]] = None -) -> str: - """Format a datetime object or timestamp into an :rfc:`2822` date - string. - - This is a wrapper for :func:`email.utils.format_datetime`. It - assumes naive datetime objects are in UTC instead of raising an - exception. - - :param timestamp: The datetime or timestamp to format. Defaults to - the current time. - - .. versionchanged:: 2.0 - Use ``email.utils.format_datetime``. Accept ``date`` objects. - """ - if isinstance(timestamp, date): - if not isinstance(timestamp, datetime): - # Assume plain date is midnight UTC. - timestamp = datetime.combine(timestamp, time(), tzinfo=timezone.utc) - else: - # Ensure datetime is timezone-aware. - timestamp = _dt_as_utc(timestamp) - - return email.utils.format_datetime(timestamp, usegmt=True) - - if isinstance(timestamp, struct_time): - timestamp = mktime(timestamp) - - return email.utils.formatdate(timestamp, usegmt=True) - - -def parse_age(value: t.Optional[str] = None) -> t.Optional[timedelta]: - """Parses a base-10 integer count of seconds into a timedelta. - - If parsing fails, the return value is `None`. - - :param value: a string consisting of an integer represented in base-10 - :return: a :class:`datetime.timedelta` object or `None`. 
- """ - if not value: - return None - try: - seconds = int(value) - except ValueError: - return None - if seconds < 0: - return None - try: - return timedelta(seconds=seconds) - except OverflowError: - return None - - -def dump_age(age: t.Optional[t.Union[timedelta, int]] = None) -> t.Optional[str]: - """Formats the duration as a base-10 integer. - - :param age: should be an integer number of seconds, - a :class:`datetime.timedelta` object, or, - if the age is unknown, `None` (default). - """ - if age is None: - return None - if isinstance(age, timedelta): - age = int(age.total_seconds()) - else: - age = int(age) - - if age < 0: - raise ValueError("age cannot be negative") - - return str(age) - - -def is_resource_modified( - environ: "WSGIEnvironment", - etag: t.Optional[str] = None, - data: t.Optional[bytes] = None, - last_modified: t.Optional[t.Union[datetime, str]] = None, - ignore_if_range: bool = True, -) -> bool: - """Convenience method for conditional requests. - - :param environ: the WSGI environment of the request to be checked. - :param etag: the etag for the response for comparison. - :param data: or alternatively the data of the response to automatically - generate an etag using :func:`generate_etag`. - :param last_modified: an optional date of the last modification. - :param ignore_if_range: If `False`, `If-Range` header will be taken into - account. - :return: `True` if the resource was modified, otherwise `False`. - - .. versionchanged:: 2.0 - SHA-1 is used to generate an etag value for the data. MD5 may - not be available in some environments. - - .. versionchanged:: 1.0.0 - The check is run for methods other than ``GET`` and ``HEAD``. - """ - if etag is None and data is not None: - etag = generate_etag(data) - elif data is not None: - raise TypeError("both data and etag given") - - unmodified = False - if isinstance(last_modified, str): - last_modified = parse_date(last_modified) - - # HTTP doesn't use microsecond, remove it to avoid false positive - # comparisons. Mark naive datetimes as UTC. - if last_modified is not None: - last_modified = _dt_as_utc(last_modified.replace(microsecond=0)) - - if_range = None - if not ignore_if_range and "HTTP_RANGE" in environ: - # https://tools.ietf.org/html/rfc7233#section-3.2 - # A server MUST ignore an If-Range header field received in a request - # that does not contain a Range header field. 
- if_range = parse_if_range_header(environ.get("HTTP_IF_RANGE")) - - if if_range is not None and if_range.date is not None: - modified_since: t.Optional[datetime] = if_range.date - else: - modified_since = parse_date(environ.get("HTTP_IF_MODIFIED_SINCE")) - - if modified_since and last_modified and last_modified <= modified_since: - unmodified = True - - if etag: - etag, _ = unquote_etag(etag) - etag = t.cast(str, etag) - - if if_range is not None and if_range.etag is not None: - unmodified = parse_etags(if_range.etag).contains(etag) - else: - if_none_match = parse_etags(environ.get("HTTP_IF_NONE_MATCH")) - if if_none_match: - # https://tools.ietf.org/html/rfc7232#section-3.2 - # "A recipient MUST use the weak comparison function when comparing - # entity-tags for If-None-Match" - unmodified = if_none_match.contains_weak(etag) - - # https://tools.ietf.org/html/rfc7232#section-3.1 - # "Origin server MUST use the strong comparison function when - # comparing entity-tags for If-Match" - if_match = parse_etags(environ.get("HTTP_IF_MATCH")) - if if_match: - unmodified = not if_match.is_strong(etag) - - return not unmodified - - -def remove_entity_headers( - headers: t.Union["ds.Headers", t.List[t.Tuple[str, str]]], - allowed: t.Iterable[str] = ("expires", "content-location"), -) -> None: - """Remove all entity headers from a list or :class:`Headers` object. This - operation works in-place. `Expires` and `Content-Location` headers are - by default not removed. The reason for this is :rfc:`2616` section - 10.3.5 which specifies some entity headers that should be sent. - - .. versionchanged:: 0.5 - added `allowed` parameter. - - :param headers: a list or :class:`Headers` object. - :param allowed: a list of headers that should still be allowed even though - they are entity headers. - """ - allowed = {x.lower() for x in allowed} - headers[:] = [ - (key, value) - for key, value in headers - if not is_entity_header(key) or key.lower() in allowed - ] - - -def remove_hop_by_hop_headers( - headers: t.Union["ds.Headers", t.List[t.Tuple[str, str]]] -) -> None: - """Remove all HTTP/1.1 "Hop-by-Hop" headers from a list or - :class:`Headers` object. This operation works in-place. - - .. versionadded:: 0.5 - - :param headers: a list or :class:`Headers` object. - """ - headers[:] = [ - (key, value) for key, value in headers if not is_hop_by_hop_header(key) - ] - - -def is_entity_header(header: str) -> bool: - """Check if a header is an entity header. - - .. versionadded:: 0.5 - - :param header: the header to test. - :return: `True` if it's an entity header, `False` otherwise. - """ - return header.lower() in _entity_headers - - -def is_hop_by_hop_header(header: str) -> bool: - """Check if a header is an HTTP/1.1 "Hop-by-Hop" header. - - .. versionadded:: 0.5 - - :param header: the header to test. - :return: `True` if it's an HTTP/1.1 "Hop-by-Hop" header, `False` otherwise. - """ - return header.lower() in _hop_by_hop_headers - - -def parse_cookie( - header: t.Union["WSGIEnvironment", str, bytes, None], - charset: str = "utf-8", - errors: str = "replace", - cls: t.Optional[t.Type["ds.MultiDict"]] = None, -) -> "ds.MultiDict[str, str]": - """Parse a cookie from a string or WSGI environ. - - The same key can be provided multiple times, the values are stored - in-order. The default :class:`MultiDict` will have the first value - first, and all values can be retrieved with - :meth:`MultiDict.getlist`. - - :param header: The cookie header as a string, or a WSGI environ dict - with a ``HTTP_COOKIE`` key. 
- :param charset: The charset for the cookie values. - :param errors: The error behavior for the charset decoding. - :param cls: A dict-like class to store the parsed cookies in. - Defaults to :class:`MultiDict`. - - .. versionchanged:: 1.0.0 - Returns a :class:`MultiDict` instead of a - ``TypeConversionDict``. - - .. versionchanged:: 0.5 - Returns a :class:`TypeConversionDict` instead of a regular dict. - The ``cls`` parameter was added. - """ - if isinstance(header, dict): - header = header.get("HTTP_COOKIE", "") - elif header is None: - header = "" - - # PEP 3333 sends headers through the environ as latin1 decoded - # strings. Encode strings back to bytes for parsing. - if isinstance(header, str): - header = header.encode("latin1", "replace") - - if cls is None: - cls = ds.MultiDict - - def _parse_pairs() -> t.Iterator[t.Tuple[str, str]]: - for key, val in _cookie_parse_impl(header): # type: ignore - key_str = _to_str(key, charset, errors, allow_none_charset=True) - - if not key_str: - continue - - val_str = _to_str(val, charset, errors, allow_none_charset=True) - yield key_str, val_str - - return cls(_parse_pairs()) - - -def dump_cookie( - key: str, - value: t.Union[bytes, str] = "", - max_age: t.Optional[t.Union[timedelta, int]] = None, - expires: t.Optional[t.Union[str, datetime, int, float]] = None, - path: t.Optional[str] = "/", - domain: t.Optional[str] = None, - secure: bool = False, - httponly: bool = False, - charset: str = "utf-8", - sync_expires: bool = True, - max_size: int = 4093, - samesite: t.Optional[str] = None, -) -> str: - """Create a Set-Cookie header without the ``Set-Cookie`` prefix. - - The return value is usually restricted to ascii as the vast majority - of values are properly escaped, but that is no guarantee. It's - tunneled through latin1 as required by :pep:`3333`. - - The return value is not ASCII safe if the key contains unicode - characters. This is technically against the specification but - happens in the wild. It's strongly recommended to not use - non-ASCII values for the keys. - - :param max_age: should be a number of seconds, or `None` (default) if - the cookie should last only as long as the client's - browser session. Additionally `timedelta` objects - are accepted, too. - :param expires: should be a `datetime` object or unix timestamp. - :param path: limits the cookie to a given path, per default it will - span the whole domain. - :param domain: Use this if you want to set a cross-domain cookie. For - example, ``domain=".example.com"`` will set a cookie - that is readable by the domain ``www.example.com``, - ``foo.example.com`` etc. Otherwise, a cookie will only - be readable by the domain that set it. - :param secure: The cookie will only be available via HTTPS - :param httponly: disallow JavaScript to access the cookie. This is an - extension to the cookie standard and probably not - supported by all browsers. - :param charset: the encoding for string values. - :param sync_expires: automatically set expires if max_age is defined - but expires not. - :param max_size: Warn if the final header value exceeds this size. The - default, 4093, should be safely `supported by most browsers - `_. Set to 0 to disable this check. - :param samesite: Limits the scope of the cookie such that it will - only be attached to requests if those requests are same-site. - - .. _`cookie`: http://browsercookielimits.squawky.net/ - - .. versionchanged:: 1.0.0 - The string ``'None'`` is accepted for ``samesite``. 
- """ - key = _to_bytes(key, charset) - value = _to_bytes(value, charset) - - if path is not None: - from .urls import iri_to_uri - - path = iri_to_uri(path, charset) - - domain = _make_cookie_domain(domain) - - if isinstance(max_age, timedelta): - max_age = int(max_age.total_seconds()) - - if expires is not None: - if not isinstance(expires, str): - expires = http_date(expires) - elif max_age is not None and sync_expires: - expires = http_date(datetime.now(tz=timezone.utc).timestamp() + max_age) - - if samesite is not None: - samesite = samesite.title() - - if samesite not in {"Strict", "Lax", "None"}: - raise ValueError("SameSite must be 'Strict', 'Lax', or 'None'.") - - buf = [key + b"=" + _cookie_quote(value)] - - # XXX: In theory all of these parameters that are not marked with `None` - # should be quoted. Because stdlib did not quote it before I did not - # want to introduce quoting there now. - for k, v, q in ( - (b"Domain", domain, True), - (b"Expires", expires, False), - (b"Max-Age", max_age, False), - (b"Secure", secure, None), - (b"HttpOnly", httponly, None), - (b"Path", path, False), - (b"SameSite", samesite, False), - ): - if q is None: - if v: - buf.append(k) - continue - - if v is None: - continue - - tmp = bytearray(k) - if not isinstance(v, (bytes, bytearray)): - v = _to_bytes(str(v), charset) - if q: - v = _cookie_quote(v) - tmp += b"=" + v - buf.append(bytes(tmp)) - - # The return value will be an incorrectly encoded latin1 header for - # consistency with the headers object. - rv = b"; ".join(buf) - rv = rv.decode("latin1") - - # Warn if the final value of the cookie is larger than the limit. If the - # cookie is too large, then it may be silently ignored by the browser, - # which can be quite hard to debug. - cookie_size = len(rv) - - if max_size and cookie_size > max_size: - value_size = len(value) - warnings.warn( - f"The {key.decode(charset)!r} cookie is too large: the value was" - f" {value_size} bytes but the" - f" header required {cookie_size - value_size} extra bytes. The final size" - f" was {cookie_size} bytes but the limit is {max_size} bytes. Browsers may" - f" silently ignore cookies larger than this.", - stacklevel=2, - ) - - return rv - - -def is_byte_range_valid( - start: t.Optional[int], stop: t.Optional[int], length: t.Optional[int] -) -> bool: - """Checks if a given byte content range is valid for the given length. - - .. versionadded:: 0.7 - """ - if (start is None) != (stop is None): - return False - elif start is None: - return length is None or length >= 0 - elif length is None: - return 0 <= start < stop # type: ignore - elif start >= stop: # type: ignore - return False - return 0 <= start < length - - -# circular dependencies -from . 
import datastructures as ds diff --git a/venv/lib/python3.8/site-packages/werkzeug/local.py b/venv/lib/python3.8/site-packages/werkzeug/local.py deleted file mode 100644 index b4dee7b..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/local.py +++ /dev/null @@ -1,677 +0,0 @@ -import copy -import math -import operator -import sys -import typing as t -import warnings -from functools import partial -from functools import update_wrapper - -from .wsgi import ClosingIterator - -if t.TYPE_CHECKING: - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - -try: - from greenlet import getcurrent as _get_ident -except ImportError: - from threading import get_ident as _get_ident - - -def get_ident() -> int: - warnings.warn( - "'get_ident' is deprecated and will be removed in Werkzeug" - " 2.1. Use 'greenlet.getcurrent' or 'threading.get_ident' for" - " previous behavior.", - DeprecationWarning, - stacklevel=2, - ) - return _get_ident() # type: ignore - - -class _CannotUseContextVar(Exception): - pass - - -try: - from contextvars import ContextVar - - if "gevent" in sys.modules or "eventlet" in sys.modules: - # Both use greenlet, so first check it has patched - # ContextVars, Greenlet <0.4.17 does not. - import greenlet - - greenlet_patched = getattr(greenlet, "GREENLET_USE_CONTEXT_VARS", False) - - if not greenlet_patched: - # If Gevent is used, check it has patched ContextVars, - # <20.5 does not. - try: - from gevent.monkey import is_object_patched - except ImportError: - # Gevent isn't used, but Greenlet is and hasn't patched - raise _CannotUseContextVar() from None - else: - if is_object_patched("threading", "local") and not is_object_patched( - "contextvars", "ContextVar" - ): - raise _CannotUseContextVar() - - def __release_local__(storage: t.Any) -> None: - # Can remove when support for non-stdlib ContextVars is - # removed, see "Fake" version below. - storage.set({}) - - -except (ImportError, _CannotUseContextVar): - - class ContextVar: # type: ignore - """A fake ContextVar based on the previous greenlet/threading - ident function. Used on Python 3.6, eventlet, and old versions - of gevent. - """ - - def __init__(self, _name: str) -> None: - self.storage: t.Dict[int, t.Dict[str, t.Any]] = {} - - def get(self, default: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]: - return self.storage.get(_get_ident(), default) - - def set(self, value: t.Dict[str, t.Any]) -> None: - self.storage[_get_ident()] = value - - def __release_local__(storage: t.Any) -> None: - # Special version to ensure that the storage is cleaned up on - # release. - storage.storage.pop(_get_ident(), None) - - -def release_local(local: t.Union["Local", "LocalStack"]) -> None: - """Releases the contents of the local for the current context. - This makes it possible to use locals without a manager. - - Example:: - - >>> loc = Local() - >>> loc.foo = 42 - >>> release_local(loc) - >>> hasattr(loc, 'foo') - False - - With this function one can release :class:`Local` objects as well - as :class:`LocalStack` objects. However it is not possible to - release data held by proxies that way, one always has to retain - a reference to the underlying local object in order to be able - to release it. - - .. 
versionadded:: 0.6.1 - """ - local.__release_local__() - - -class Local: - __slots__ = ("_storage",) - - def __init__(self) -> None: - object.__setattr__(self, "_storage", ContextVar("local_storage")) - - @property - def __storage__(self) -> t.Dict[str, t.Any]: - warnings.warn( - "'__storage__' is deprecated and will be removed in Werkzeug 2.1.", - DeprecationWarning, - stacklevel=2, - ) - return self._storage.get({}) # type: ignore - - @property - def __ident_func__(self) -> t.Callable[[], int]: - warnings.warn( - "'__ident_func__' is deprecated and will be removed in" - " Werkzeug 2.1. It should not be used in Python 3.7+.", - DeprecationWarning, - stacklevel=2, - ) - return _get_ident # type: ignore - - @__ident_func__.setter - def __ident_func__(self, func: t.Callable[[], int]) -> None: - warnings.warn( - "'__ident_func__' is deprecated and will be removed in" - " Werkzeug 2.1. Setting it no longer has any effect.", - DeprecationWarning, - stacklevel=2, - ) - - def __iter__(self) -> t.Iterator[t.Tuple[int, t.Any]]: - return iter(self._storage.get({}).items()) - - def __call__(self, proxy: str) -> "LocalProxy": - """Create a proxy for a name.""" - return LocalProxy(self, proxy) - - def __release_local__(self) -> None: - __release_local__(self._storage) - - def __getattr__(self, name: str) -> t.Any: - values = self._storage.get({}) - try: - return values[name] - except KeyError: - raise AttributeError(name) from None - - def __setattr__(self, name: str, value: t.Any) -> None: - values = self._storage.get({}).copy() - values[name] = value - self._storage.set(values) - - def __delattr__(self, name: str) -> None: - values = self._storage.get({}).copy() - try: - del values[name] - self._storage.set(values) - except KeyError: - raise AttributeError(name) from None - - -class LocalStack: - """This class works similar to a :class:`Local` but keeps a stack - of objects instead. This is best explained with an example:: - - >>> ls = LocalStack() - >>> ls.push(42) - >>> ls.top - 42 - >>> ls.push(23) - >>> ls.top - 23 - >>> ls.pop() - 23 - >>> ls.top - 42 - - They can be force released by using a :class:`LocalManager` or with - the :func:`release_local` function but the correct way is to pop the - item from the stack after using. When the stack is empty it will - no longer be bound to the current context (and as such released). - - By calling the stack without arguments it returns a proxy that resolves to - the topmost item on the stack. - - .. versionadded:: 0.6.1 - """ - - def __init__(self) -> None: - self._local = Local() - - def __release_local__(self) -> None: - self._local.__release_local__() - - @property - def __ident_func__(self) -> t.Callable[[], int]: - return self._local.__ident_func__ - - @__ident_func__.setter - def __ident_func__(self, value: t.Callable[[], int]) -> None: - object.__setattr__(self._local, "__ident_func__", value) - - def __call__(self) -> "LocalProxy": - def _lookup() -> t.Any: - rv = self.top - if rv is None: - raise RuntimeError("object unbound") - return rv - - return LocalProxy(_lookup) - - def push(self, obj: t.Any) -> t.List[t.Any]: - """Pushes a new item to the stack""" - rv = getattr(self._local, "stack", []).copy() - rv.append(obj) - self._local.stack = rv - return rv # type: ignore - - def pop(self) -> t.Any: - """Removes the topmost item from the stack, will return the - old value or `None` if the stack was already empty. 
- """ - stack = getattr(self._local, "stack", None) - if stack is None: - return None - elif len(stack) == 1: - release_local(self._local) - return stack[-1] - else: - return stack.pop() - - @property - def top(self) -> t.Any: - """The topmost item on the stack. If the stack is empty, - `None` is returned. - """ - try: - return self._local.stack[-1] - except (AttributeError, IndexError): - return None - - -class LocalManager: - """Local objects cannot manage themselves. For that you need a local - manager. You can pass a local manager multiple locals or add them - later by appending them to `manager.locals`. Every time the manager - cleans up, it will clean up all the data left in the locals for this - context. - - .. versionchanged:: 2.0 - ``ident_func`` is deprecated and will be removed in Werkzeug - 2.1. - - .. versionchanged:: 0.6.1 - The :func:`release_local` function can be used instead of a - manager. - - .. versionchanged:: 0.7 - The ``ident_func`` parameter was added. - """ - - def __init__( - self, - locals: t.Optional[t.Iterable[t.Union[Local, LocalStack]]] = None, - ident_func: None = None, - ) -> None: - if locals is None: - self.locals = [] - elif isinstance(locals, Local): - self.locals = [locals] - else: - self.locals = list(locals) - - if ident_func is not None: - warnings.warn( - "'ident_func' is deprecated and will be removed in" - " Werkzeug 2.1. Setting it no longer has any effect.", - DeprecationWarning, - stacklevel=2, - ) - - @property - def ident_func(self) -> t.Callable[[], int]: - warnings.warn( - "'ident_func' is deprecated and will be removed in Werkzeug 2.1.", - DeprecationWarning, - stacklevel=2, - ) - return _get_ident # type: ignore - - @ident_func.setter - def ident_func(self, func: t.Callable[[], int]) -> None: - warnings.warn( - "'ident_func' is deprecated and will be removedin Werkzeug" - " 2.1. Setting it no longer has any effect.", - DeprecationWarning, - stacklevel=2, - ) - - def get_ident(self) -> int: - """Return the context identifier the local objects use internally for - this context. You cannot override this method to change the behavior - but use it to link other context local objects (such as SQLAlchemy's - scoped sessions) to the Werkzeug locals. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. - - .. versionchanged:: 0.7 - You can pass a different ident function to the local manager that - will then be propagated to all the locals passed to the - constructor. - """ - warnings.warn( - "'get_ident' is deprecated and will be removed in Werkzeug 2.1.", - DeprecationWarning, - stacklevel=2, - ) - return self.ident_func() - - def cleanup(self) -> None: - """Manually clean up the data in the locals for this context. Call - this at the end of the request or use `make_middleware()`. - """ - for local in self.locals: - release_local(local) - - def make_middleware(self, app: "WSGIApplication") -> "WSGIApplication": - """Wrap a WSGI application so that cleaning up happens after - request end. - """ - - def application( - environ: "WSGIEnvironment", start_response: "StartResponse" - ) -> t.Iterable[bytes]: - return ClosingIterator(app(environ, start_response), self.cleanup) - - return application - - def middleware(self, func: "WSGIApplication") -> "WSGIApplication": - """Like `make_middleware` but for decorating functions. - - Example usage:: - - @manager.middleware - def application(environ, start_response): - ... 
- - The difference to `make_middleware` is that the function passed - will have all the arguments copied from the inner application - (name, docstring, module). - """ - return update_wrapper(self.make_middleware(func), func) - - def __repr__(self) -> str: - return f"<{type(self).__name__} storages: {len(self.locals)}>" - - -class _ProxyLookup: - """Descriptor that handles proxied attribute lookup for - :class:`LocalProxy`. - - :param f: The built-in function this attribute is accessed through. - Instead of looking up the special method, the function call - is redone on the object. - :param fallback: Call this method if the proxy is unbound instead of - raising a :exc:`RuntimeError`. - :param class_value: Value to return when accessed from the class. - Used for ``__doc__`` so building docs still works. - """ - - __slots__ = ("bind_f", "fallback", "class_value", "name") - - def __init__( - self, - f: t.Optional[t.Callable] = None, - fallback: t.Optional[t.Callable] = None, - class_value: t.Optional[t.Any] = None, - ) -> None: - bind_f: t.Optional[t.Callable[["LocalProxy", t.Any], t.Callable]] - - if hasattr(f, "__get__"): - # A Python function, can be turned into a bound method. - - def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable: - return f.__get__(obj, type(obj)) # type: ignore - - elif f is not None: - # A C function, use partial to bind the first argument. - - def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable: - return partial(f, obj) # type: ignore - - else: - # Use getattr, which will produce a bound method. - bind_f = None - - self.bind_f = bind_f - self.fallback = fallback - self.class_value = class_value - - def __set_name__(self, owner: "LocalProxy", name: str) -> None: - self.name = name - - def __get__(self, instance: "LocalProxy", owner: t.Optional[type] = None) -> t.Any: - if instance is None: - if self.class_value is not None: - return self.class_value - - return self - - try: - obj = instance._get_current_object() - except RuntimeError: - if self.fallback is None: - raise - - return self.fallback.__get__(instance, owner) # type: ignore - - if self.bind_f is not None: - return self.bind_f(instance, obj) - - return getattr(obj, self.name) - - def __repr__(self) -> str: - return f"proxy {self.name}" - - def __call__(self, instance: "LocalProxy", *args: t.Any, **kwargs: t.Any) -> t.Any: - """Support calling unbound methods from the class. For example, - this happens with ``copy.copy``, which does - ``type(x).__copy__(x)``. ``type(x)`` can't be proxied, so it - returns the proxy type and descriptor. - """ - return self.__get__(instance, type(instance))(*args, **kwargs) - - -class _ProxyIOp(_ProxyLookup): - """Look up an augmented assignment method on a proxied object. The - method is wrapped to return the proxy instead of the object. - """ - - __slots__ = () - - def __init__( - self, f: t.Optional[t.Callable] = None, fallback: t.Optional[t.Callable] = None - ) -> None: - super().__init__(f, fallback) - - def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable: - def i_op(self: t.Any, other: t.Any) -> "LocalProxy": - f(self, other) # type: ignore - return instance - - return i_op.__get__(obj, type(obj)) # type: ignore - - self.bind_f = bind_f - - -def _l_to_r_op(op: F) -> F: - """Swap the argument order to turn an l-op into an r-op.""" - - def r_op(obj: t.Any, other: t.Any) -> t.Any: - return op(other, obj) - - return t.cast(F, r_op) - - -class LocalProxy: - """A proxy to the object bound to a :class:`Local`. 
All operations - on the proxy are forwarded to the bound object. If no object is - bound, a :exc:`RuntimeError` is raised. - - .. code-block:: python - - from werkzeug.local import Local - l = Local() - - # a proxy to whatever l.user is set to - user = l("user") - - from werkzeug.local import LocalStack - _request_stack = LocalStack() - - # a proxy to _request_stack.top - request = _request_stack() - - # a proxy to the session attribute of the request proxy - session = LocalProxy(lambda: request.session) - - ``__repr__`` and ``__class__`` are forwarded, so ``repr(x)`` and - ``isinstance(x, cls)`` will look like the proxied object. Use - ``issubclass(type(x), LocalProxy)`` to check if an object is a - proxy. - - .. code-block:: python - - repr(user) # - isinstance(user, User) # True - issubclass(type(user), LocalProxy) # True - - :param local: The :class:`Local` or callable that provides the - proxied object. - :param name: The attribute name to look up on a :class:`Local`. Not - used if a callable is given. - - .. versionchanged:: 2.0 - Updated proxied attributes and methods to reflect the current - data model. - - .. versionchanged:: 0.6.1 - The class can be instantiated with a callable. - """ - - __slots__ = ("__local", "__name", "__wrapped__") - - def __init__( - self, - local: t.Union["Local", t.Callable[[], t.Any]], - name: t.Optional[str] = None, - ) -> None: - object.__setattr__(self, "_LocalProxy__local", local) - object.__setattr__(self, "_LocalProxy__name", name) - - if callable(local) and not hasattr(local, "__release_local__"): - # "local" is a callable that is not an instance of Local or - # LocalManager: mark it as a wrapped function. - object.__setattr__(self, "__wrapped__", local) - - def _get_current_object(self) -> t.Any: - """Return the current object. This is useful if you want the real - object behind the proxy at a time for performance reasons or because - you want to pass the object into a different context. 
- """ - if not hasattr(self.__local, "__release_local__"): # type: ignore - return self.__local() # type: ignore - - try: - return getattr(self.__local, self.__name) # type: ignore - except AttributeError: - name = self.__name # type: ignore - raise RuntimeError(f"no object bound to {name}") from None - - __doc__ = _ProxyLookup( # type: ignore - class_value=__doc__, fallback=lambda self: type(self).__doc__ - ) - # __del__ should only delete the proxy - __repr__ = _ProxyLookup( # type: ignore - repr, fallback=lambda self: f"<{type(self).__name__} unbound>" - ) - __str__ = _ProxyLookup(str) # type: ignore - __bytes__ = _ProxyLookup(bytes) - __format__ = _ProxyLookup() # type: ignore - __lt__ = _ProxyLookup(operator.lt) - __le__ = _ProxyLookup(operator.le) - __eq__ = _ProxyLookup(operator.eq) # type: ignore - __ne__ = _ProxyLookup(operator.ne) # type: ignore - __gt__ = _ProxyLookup(operator.gt) - __ge__ = _ProxyLookup(operator.ge) - __hash__ = _ProxyLookup(hash) # type: ignore - __bool__ = _ProxyLookup(bool, fallback=lambda self: False) - __getattr__ = _ProxyLookup(getattr) - # __getattribute__ triggered through __getattr__ - __setattr__ = _ProxyLookup(setattr) # type: ignore - __delattr__ = _ProxyLookup(delattr) # type: ignore - __dir__ = _ProxyLookup(dir, fallback=lambda self: []) # type: ignore - # __get__ (proxying descriptor not supported) - # __set__ (descriptor) - # __delete__ (descriptor) - # __set_name__ (descriptor) - # __objclass__ (descriptor) - # __slots__ used by proxy itself - # __dict__ (__getattr__) - # __weakref__ (__getattr__) - # __init_subclass__ (proxying metaclass not supported) - # __prepare__ (metaclass) - __class__ = _ProxyLookup(fallback=lambda self: type(self)) # type: ignore - __instancecheck__ = _ProxyLookup(lambda self, other: isinstance(other, self)) - __subclasscheck__ = _ProxyLookup(lambda self, other: issubclass(other, self)) - # __class_getitem__ triggered through __getitem__ - __call__ = _ProxyLookup(lambda self, *args, **kwargs: self(*args, **kwargs)) - __len__ = _ProxyLookup(len) - __length_hint__ = _ProxyLookup(operator.length_hint) - __getitem__ = _ProxyLookup(operator.getitem) - __setitem__ = _ProxyLookup(operator.setitem) - __delitem__ = _ProxyLookup(operator.delitem) - # __missing__ triggered through __getitem__ - __iter__ = _ProxyLookup(iter) - __next__ = _ProxyLookup(next) - __reversed__ = _ProxyLookup(reversed) - __contains__ = _ProxyLookup(operator.contains) - __add__ = _ProxyLookup(operator.add) - __sub__ = _ProxyLookup(operator.sub) - __mul__ = _ProxyLookup(operator.mul) - __matmul__ = _ProxyLookup(operator.matmul) - __truediv__ = _ProxyLookup(operator.truediv) - __floordiv__ = _ProxyLookup(operator.floordiv) - __mod__ = _ProxyLookup(operator.mod) - __divmod__ = _ProxyLookup(divmod) - __pow__ = _ProxyLookup(pow) - __lshift__ = _ProxyLookup(operator.lshift) - __rshift__ = _ProxyLookup(operator.rshift) - __and__ = _ProxyLookup(operator.and_) - __xor__ = _ProxyLookup(operator.xor) - __or__ = _ProxyLookup(operator.or_) - __radd__ = _ProxyLookup(_l_to_r_op(operator.add)) - __rsub__ = _ProxyLookup(_l_to_r_op(operator.sub)) - __rmul__ = _ProxyLookup(_l_to_r_op(operator.mul)) - __rmatmul__ = _ProxyLookup(_l_to_r_op(operator.matmul)) - __rtruediv__ = _ProxyLookup(_l_to_r_op(operator.truediv)) - __rfloordiv__ = _ProxyLookup(_l_to_r_op(operator.floordiv)) - __rmod__ = _ProxyLookup(_l_to_r_op(operator.mod)) - __rdivmod__ = _ProxyLookup(_l_to_r_op(divmod)) - __rpow__ = _ProxyLookup(_l_to_r_op(pow)) - __rlshift__ = _ProxyLookup(_l_to_r_op(operator.lshift)) 
- __rrshift__ = _ProxyLookup(_l_to_r_op(operator.rshift)) - __rand__ = _ProxyLookup(_l_to_r_op(operator.and_)) - __rxor__ = _ProxyLookup(_l_to_r_op(operator.xor)) - __ror__ = _ProxyLookup(_l_to_r_op(operator.or_)) - __iadd__ = _ProxyIOp(operator.iadd) - __isub__ = _ProxyIOp(operator.isub) - __imul__ = _ProxyIOp(operator.imul) - __imatmul__ = _ProxyIOp(operator.imatmul) - __itruediv__ = _ProxyIOp(operator.itruediv) - __ifloordiv__ = _ProxyIOp(operator.ifloordiv) - __imod__ = _ProxyIOp(operator.imod) - __ipow__ = _ProxyIOp(operator.ipow) - __ilshift__ = _ProxyIOp(operator.ilshift) - __irshift__ = _ProxyIOp(operator.irshift) - __iand__ = _ProxyIOp(operator.iand) - __ixor__ = _ProxyIOp(operator.ixor) - __ior__ = _ProxyIOp(operator.ior) - __neg__ = _ProxyLookup(operator.neg) - __pos__ = _ProxyLookup(operator.pos) - __abs__ = _ProxyLookup(abs) - __invert__ = _ProxyLookup(operator.invert) - __complex__ = _ProxyLookup(complex) - __int__ = _ProxyLookup(int) - __float__ = _ProxyLookup(float) - __index__ = _ProxyLookup(operator.index) - __round__ = _ProxyLookup(round) - __trunc__ = _ProxyLookup(math.trunc) - __floor__ = _ProxyLookup(math.floor) - __ceil__ = _ProxyLookup(math.ceil) - __enter__ = _ProxyLookup() - __exit__ = _ProxyLookup() - __await__ = _ProxyLookup() - __aiter__ = _ProxyLookup() - __anext__ = _ProxyLookup() - __aenter__ = _ProxyLookup() - __aexit__ = _ProxyLookup() - __copy__ = _ProxyLookup(copy.copy) - __deepcopy__ = _ProxyLookup(copy.deepcopy) - # __getnewargs_ex__ (pickle through proxy not supported) - # __getnewargs__ (pickle) - # __getstate__ (pickle) - # __setstate__ (pickle) - # __reduce__ (pickle) - # __reduce_ex__ (pickle) diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/__init__.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/__init__.py deleted file mode 100644 index 6ddcf7f..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/middleware/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -""" -Middleware -========== - -A WSGI middleware is a WSGI application that wraps another application -in order to observe or change its behavior. Werkzeug provides some -middleware for common use cases. - -.. toctree:: - :maxdepth: 1 - - proxy_fix - shared_data - dispatcher - http_proxy - lint - profiler - -The :doc:`interactive debugger ` is also a middleware that can -be applied manually, although it is typically used automatically with -the :doc:`development server `. -""" diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/dispatcher.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/dispatcher.py deleted file mode 100644 index ace1c75..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/middleware/dispatcher.py +++ /dev/null @@ -1,78 +0,0 @@ -""" -Application Dispatcher -====================== - -This middleware creates a single WSGI application that dispatches to -multiple other WSGI applications mounted at different URL paths. - -A common example is writing a Single Page Application, where you have a -backend API and a frontend written in JavaScript that does the routing -in the browser rather than requesting different pages from the server. -The frontend is a single HTML and JS file that should be served for any -path besides "/api". 
- -This example dispatches to an API app under "/api", an admin app -under "/admin", and an app that serves frontend files for all other -requests:: - - app = DispatcherMiddleware(serve_frontend, { - '/api': api_app, - '/admin': admin_app, - }) - -In production, you might instead handle this at the HTTP server level, -serving files or proxying to application servers based on location. The -API and admin apps would each be deployed with a separate WSGI server, -and the static files would be served directly by the HTTP server. - -.. autoclass:: DispatcherMiddleware - -:copyright: 2007 Pallets -:license: BSD-3-Clause -""" -import typing as t - -if t.TYPE_CHECKING: - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - - -class DispatcherMiddleware: - """Combine multiple applications as a single WSGI application. - Requests are dispatched to an application based on the path it is - mounted under. - - :param app: The WSGI application to dispatch to if the request - doesn't match a mounted path. - :param mounts: Maps path prefixes to applications for dispatching. - """ - - def __init__( - self, - app: "WSGIApplication", - mounts: t.Optional[t.Dict[str, "WSGIApplication"]] = None, - ) -> None: - self.app = app - self.mounts = mounts or {} - - def __call__( - self, environ: "WSGIEnvironment", start_response: "StartResponse" - ) -> t.Iterable[bytes]: - script = environ.get("PATH_INFO", "") - path_info = "" - - while "/" in script: - if script in self.mounts: - app = self.mounts[script] - break - - script, last_item = script.rsplit("/", 1) - path_info = f"/{last_item}{path_info}" - else: - app = self.mounts.get(script, self.app) - - original_script_name = environ.get("SCRIPT_NAME", "") - environ["SCRIPT_NAME"] = original_script_name + script - environ["PATH_INFO"] = path_info - return app(environ, start_response) diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/http_proxy.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/http_proxy.py deleted file mode 100644 index 1cde458..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/middleware/http_proxy.py +++ /dev/null @@ -1,230 +0,0 @@ -""" -Basic HTTP Proxy -================ - -.. autoclass:: ProxyMiddleware - -:copyright: 2007 Pallets -:license: BSD-3-Clause -""" -import typing as t -from http import client - -from ..datastructures import EnvironHeaders -from ..http import is_hop_by_hop_header -from ..urls import url_parse -from ..urls import url_quote -from ..wsgi import get_input_stream - -if t.TYPE_CHECKING: - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - - -class ProxyMiddleware: - """Proxy requests under a path to an external server, routing other - requests to the app. - - This middleware can only proxy HTTP requests, as HTTP is the only - protocol handled by the WSGI server. Other protocols, such as - WebSocket requests, cannot be proxied at this layer. This should - only be used for development, in production a real proxy server - should be used. - - The middleware takes a dict mapping a path prefix to a dict - describing the host to be proxied to:: - - app = ProxyMiddleware(app, { - "/static/": { - "target": "http://127.0.0.1:5001/", - } - }) - - Each host has the following options: - - ``target``: - The target URL to dispatch to. This is required. - ``remove_prefix``: - Whether to remove the prefix from the URL before dispatching it - to the target. 
The default is ``False``. - ``host``: - ``""`` (default): - The host header is automatically rewritten to the URL of the - target. - ``None``: - The host header is unmodified from the client request. - Any other value: - The host header is overwritten with the value. - ``headers``: - A dictionary of headers to be sent with the request to the - target. The default is ``{}``. - ``ssl_context``: - A :class:`ssl.SSLContext` defining how to verify requests if the - target is HTTPS. The default is ``None``. - - In the example above, everything under ``"/static/"`` is proxied to - the server on port 5001. The host header is rewritten to the target, - and the ``"/static/"`` prefix is removed from the URLs. - - :param app: The WSGI application to wrap. - :param targets: Proxy target configurations. See description above. - :param chunk_size: Size of chunks to read from input stream and - write to target. - :param timeout: Seconds before an operation to a target fails. - - .. versionadded:: 0.14 - """ - - def __init__( - self, - app: "WSGIApplication", - targets: t.Mapping[str, t.Dict[str, t.Any]], - chunk_size: int = 2 << 13, - timeout: int = 10, - ) -> None: - def _set_defaults(opts: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]: - opts.setdefault("remove_prefix", False) - opts.setdefault("host", "") - opts.setdefault("headers", {}) - opts.setdefault("ssl_context", None) - return opts - - self.app = app - self.targets = { - f"/{k.strip('/')}/": _set_defaults(v) for k, v in targets.items() - } - self.chunk_size = chunk_size - self.timeout = timeout - - def proxy_to( - self, opts: t.Dict[str, t.Any], path: str, prefix: str - ) -> "WSGIApplication": - target = url_parse(opts["target"]) - host = t.cast(str, target.ascii_host) - - def application( - environ: "WSGIEnvironment", start_response: "StartResponse" - ) -> t.Iterable[bytes]: - headers = list(EnvironHeaders(environ).items()) - headers[:] = [ - (k, v) - for k, v in headers - if not is_hop_by_hop_header(k) - and k.lower() not in ("content-length", "host") - ] - headers.append(("Connection", "close")) - - if opts["host"] == "": - headers.append(("Host", host)) - elif opts["host"] is None: - headers.append(("Host", environ["HTTP_HOST"])) - else: - headers.append(("Host", opts["host"])) - - headers.extend(opts["headers"].items()) - remote_path = path - - if opts["remove_prefix"]: - remote_path = remote_path[len(prefix) :].lstrip("/") - remote_path = f"{target.path.rstrip('/')}/{remote_path}" - - content_length = environ.get("CONTENT_LENGTH") - chunked = False - - if content_length not in ("", None): - headers.append(("Content-Length", content_length)) # type: ignore - elif content_length is not None: - headers.append(("Transfer-Encoding", "chunked")) - chunked = True - - try: - if target.scheme == "http": - con = client.HTTPConnection( - host, target.port or 80, timeout=self.timeout - ) - elif target.scheme == "https": - con = client.HTTPSConnection( - host, - target.port or 443, - timeout=self.timeout, - context=opts["ssl_context"], - ) - else: - raise RuntimeError( - "Target scheme must be 'http' or 'https', got" - f" {target.scheme!r}." 
- ) - - con.connect() - remote_url = url_quote(remote_path) - querystring = environ["QUERY_STRING"] - - if querystring: - remote_url = f"{remote_url}?{querystring}" - - con.putrequest(environ["REQUEST_METHOD"], remote_url, skip_host=True) - - for k, v in headers: - if k.lower() == "connection": - v = "close" - - con.putheader(k, v) - - con.endheaders() - stream = get_input_stream(environ) - - while True: - data = stream.read(self.chunk_size) - - if not data: - break - - if chunked: - con.send(b"%x\r\n%s\r\n" % (len(data), data)) - else: - con.send(data) - - resp = con.getresponse() - except OSError: - from ..exceptions import BadGateway - - return BadGateway()(environ, start_response) - - start_response( - f"{resp.status} {resp.reason}", - [ - (k.title(), v) - for k, v in resp.getheaders() - if not is_hop_by_hop_header(k) - ], - ) - - def read() -> t.Iterator[bytes]: - while True: - try: - data = resp.read(self.chunk_size) - except OSError: - break - - if not data: - break - - yield data - - return read() - - return application - - def __call__( - self, environ: "WSGIEnvironment", start_response: "StartResponse" - ) -> t.Iterable[bytes]: - path = environ["PATH_INFO"] - app = self.app - - for prefix, opts in self.targets.items(): - if path.startswith(prefix): - app = self.proxy_to(opts, path, prefix) - break - - return app(environ, start_response) diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/lint.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/lint.py deleted file mode 100644 index c74703b..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/middleware/lint.py +++ /dev/null @@ -1,420 +0,0 @@ -""" -WSGI Protocol Linter -==================== - -This module provides a middleware that performs sanity checks on the -behavior of the WSGI server and application. It checks that the -:pep:`3333` WSGI spec is properly implemented. It also warns on some -common HTTP errors such as non-empty responses for 304 status codes. - -.. autoclass:: LintMiddleware - -:copyright: 2007 Pallets -:license: BSD-3-Clause -""" -import typing as t -from types import TracebackType -from urllib.parse import urlparse -from warnings import warn - -from ..datastructures import Headers -from ..http import is_entity_header -from ..wsgi import FileWrapper - -if t.TYPE_CHECKING: - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - - -class WSGIWarning(Warning): - """Warning class for WSGI warnings.""" - - -class HTTPWarning(Warning): - """Warning class for HTTP warnings.""" - - -def check_type(context: str, obj: object, need: t.Type = str) -> None: - if type(obj) is not need: - warn( - f"{context!r} requires {need.__name__!r}, got {type(obj).__name__!r}.", - WSGIWarning, - stacklevel=3, - ) - - -class InputStream: - def __init__(self, stream: t.IO[bytes]) -> None: - self._stream = stream - - def read(self, *args: t.Any) -> bytes: - if len(args) == 0: - warn( - "WSGI does not guarantee an EOF marker on the input stream, thus making" - " calls to 'wsgi.input.read()' unsafe. Conforming servers may never" - " return from this call.", - WSGIWarning, - stacklevel=2, - ) - elif len(args) != 1: - warn( - "Too many parameters passed to 'wsgi.input.read()'.", - WSGIWarning, - stacklevel=2, - ) - return self._stream.read(*args) - - def readline(self, *args: t.Any) -> bytes: - if len(args) == 0: - warn( - "Calls to 'wsgi.input.readline()' without arguments are unsafe. 
Use" - " 'wsgi.input.read()' instead.", - WSGIWarning, - stacklevel=2, - ) - elif len(args) == 1: - warn( - "'wsgi.input.readline()' was called with a size hint. WSGI does not" - " support this, although it's available on all major servers.", - WSGIWarning, - stacklevel=2, - ) - else: - raise TypeError("Too many arguments passed to 'wsgi.input.readline()'.") - return self._stream.readline(*args) - - def __iter__(self) -> t.Iterator[bytes]: - try: - return iter(self._stream) - except TypeError: - warn("'wsgi.input' is not iterable.", WSGIWarning, stacklevel=2) - return iter(()) - - def close(self) -> None: - warn("The application closed the input stream!", WSGIWarning, stacklevel=2) - self._stream.close() - - -class ErrorStream: - def __init__(self, stream: t.IO[str]) -> None: - self._stream = stream - - def write(self, s: str) -> None: - check_type("wsgi.error.write()", s, str) - self._stream.write(s) - - def flush(self) -> None: - self._stream.flush() - - def writelines(self, seq: t.Iterable[str]) -> None: - for line in seq: - self.write(line) - - def close(self) -> None: - warn("The application closed the error stream!", WSGIWarning, stacklevel=2) - self._stream.close() - - -class GuardedWrite: - def __init__(self, write: t.Callable[[bytes], None], chunks: t.List[int]) -> None: - self._write = write - self._chunks = chunks - - def __call__(self, s: bytes) -> None: - check_type("write()", s, bytes) - self._write(s) - self._chunks.append(len(s)) - - -class GuardedIterator: - def __init__( - self, - iterator: t.Iterable[bytes], - headers_set: t.Tuple[int, Headers], - chunks: t.List[int], - ) -> None: - self._iterator = iterator - self._next = iter(iterator).__next__ - self.closed = False - self.headers_set = headers_set - self.chunks = chunks - - def __iter__(self) -> "GuardedIterator": - return self - - def __next__(self) -> bytes: - if self.closed: - warn("Iterated over closed 'app_iter'.", WSGIWarning, stacklevel=2) - - rv = self._next() - - if not self.headers_set: - warn( - "The application returned before it started the response.", - WSGIWarning, - stacklevel=2, - ) - - check_type("application iterator items", rv, bytes) - self.chunks.append(len(rv)) - return rv - - def close(self) -> None: - self.closed = True - - if hasattr(self._iterator, "close"): - self._iterator.close() # type: ignore - - if self.headers_set: - status_code, headers = self.headers_set - bytes_sent = sum(self.chunks) - content_length = headers.get("content-length", type=int) - - if status_code == 304: - for key, _value in headers: - key = key.lower() - if key not in ("expires", "content-location") and is_entity_header( - key - ): - warn( - f"Entity header {key!r} found in 304 response.", HTTPWarning - ) - if bytes_sent: - warn("304 responses must not have a body.", HTTPWarning) - elif 100 <= status_code < 200 or status_code == 204: - if content_length != 0: - warn( - f"{status_code} responses must have an empty content length.", - HTTPWarning, - ) - if bytes_sent: - warn(f"{status_code} responses must not have a body.", HTTPWarning) - elif content_length is not None and content_length != bytes_sent: - warn( - "Content-Length and the number of bytes sent to the" - " client do not match.", - WSGIWarning, - ) - - def __del__(self) -> None: - if not self.closed: - try: - warn( - "Iterator was garbage collected before it was closed.", WSGIWarning - ) - except Exception: - pass - - -class LintMiddleware: - """Warns about common errors in the WSGI and HTTP behavior of the - server and wrapped application. 
Some of the issues it checks are: - - - invalid status codes - - non-bytes sent to the WSGI server - - strings returned from the WSGI application - - non-empty conditional responses - - unquoted etags - - relative URLs in the Location header - - unsafe calls to wsgi.input - - unclosed iterators - - Error information is emitted using the :mod:`warnings` module. - - :param app: The WSGI application to wrap. - - .. code-block:: python - - from werkzeug.middleware.lint import LintMiddleware - app = LintMiddleware(app) - """ - - def __init__(self, app: "WSGIApplication") -> None: - self.app = app - - def check_environ(self, environ: "WSGIEnvironment") -> None: - if type(environ) is not dict: - warn( - "WSGI environment is not a standard Python dict.", - WSGIWarning, - stacklevel=4, - ) - for key in ( - "REQUEST_METHOD", - "SERVER_NAME", - "SERVER_PORT", - "wsgi.version", - "wsgi.input", - "wsgi.errors", - "wsgi.multithread", - "wsgi.multiprocess", - "wsgi.run_once", - ): - if key not in environ: - warn( - f"Required environment key {key!r} not found", - WSGIWarning, - stacklevel=3, - ) - if environ["wsgi.version"] != (1, 0): - warn("Environ is not a WSGI 1.0 environ.", WSGIWarning, stacklevel=3) - - script_name = environ.get("SCRIPT_NAME", "") - path_info = environ.get("PATH_INFO", "") - - if script_name and script_name[0] != "/": - warn( - f"'SCRIPT_NAME' does not start with a slash: {script_name!r}", - WSGIWarning, - stacklevel=3, - ) - - if path_info and path_info[0] != "/": - warn( - f"'PATH_INFO' does not start with a slash: {path_info!r}", - WSGIWarning, - stacklevel=3, - ) - - def check_start_response( - self, - status: str, - headers: t.List[t.Tuple[str, str]], - exc_info: t.Optional[ - t.Tuple[t.Type[BaseException], BaseException, TracebackType] - ], - ) -> t.Tuple[int, Headers]: - check_type("status", status, str) - status_code_str = status.split(None, 1)[0] - - if len(status_code_str) != 3 or not status_code_str.isdigit(): - warn("Status code must be three digits.", WSGIWarning, stacklevel=3) - - if len(status) < 4 or status[3] != " ": - warn( - f"Invalid value for status {status!r}. 
Valid status strings are three" - " digits, a space and a status explanation.", - WSGIWarning, - stacklevel=3, - ) - - status_code = int(status_code_str) - - if status_code < 100: - warn("Status code < 100 detected.", WSGIWarning, stacklevel=3) - - if type(headers) is not list: - warn("Header list is not a list.", WSGIWarning, stacklevel=3) - - for item in headers: - if type(item) is not tuple or len(item) != 2: - warn("Header items must be 2-item tuples.", WSGIWarning, stacklevel=3) - name, value = item - if type(name) is not str or type(value) is not str: - warn( - "Header keys and values must be strings.", WSGIWarning, stacklevel=3 - ) - if name.lower() == "status": - warn( - "The status header is not supported due to" - " conflicts with the CGI spec.", - WSGIWarning, - stacklevel=3, - ) - - if exc_info is not None and not isinstance(exc_info, tuple): - warn("Invalid value for exc_info.", WSGIWarning, stacklevel=3) - - headers = Headers(headers) - self.check_headers(headers) - - return status_code, headers - - def check_headers(self, headers: Headers) -> None: - etag = headers.get("etag") - - if etag is not None: - if etag.startswith(("W/", "w/")): - if etag.startswith("w/"): - warn( - "Weak etag indicator should be upper case.", - HTTPWarning, - stacklevel=4, - ) - - etag = etag[2:] - - if not (etag[:1] == etag[-1:] == '"'): - warn("Unquoted etag emitted.", HTTPWarning, stacklevel=4) - - location = headers.get("location") - - if location is not None: - if not urlparse(location).netloc: - warn( - "Absolute URLs required for location header.", - HTTPWarning, - stacklevel=4, - ) - - def check_iterator(self, app_iter: t.Iterable[bytes]) -> None: - if isinstance(app_iter, bytes): - warn( - "The application returned a bytestring. The response will send one" - " character at a time to the client, which will kill performance." - " Return a list or iterable instead.", - WSGIWarning, - stacklevel=3, - ) - - def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Iterable[bytes]: - if len(args) != 2: - warn("A WSGI app takes two arguments.", WSGIWarning, stacklevel=2) - - if kwargs: - warn( - "A WSGI app does not take keyword arguments.", WSGIWarning, stacklevel=2 - ) - - environ: "WSGIEnvironment" = args[0] - start_response: "StartResponse" = args[1] - - self.check_environ(environ) - environ["wsgi.input"] = InputStream(environ["wsgi.input"]) - environ["wsgi.errors"] = ErrorStream(environ["wsgi.errors"]) - - # Hook our own file wrapper in so that applications will always - # iterate to the end and we can check the content length. 
- environ["wsgi.file_wrapper"] = FileWrapper - - headers_set: t.List[t.Any] = [] - chunks: t.List[int] = [] - - def checking_start_response( - *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[bytes], None]: - if len(args) not in {2, 3}: - warn( - f"Invalid number of arguments: {len(args)}, expected 2 or 3.", - WSGIWarning, - stacklevel=2, - ) - - if kwargs: - warn("'start_response' does not take keyword arguments.", WSGIWarning) - - status: str = args[0] - headers: t.List[t.Tuple[str, str]] = args[1] - exc_info: t.Optional[ - t.Tuple[t.Type[BaseException], BaseException, TracebackType] - ] = (args[2] if len(args) == 3 else None) - - headers_set[:] = self.check_start_response(status, headers, exc_info) - return GuardedWrite(start_response(status, headers, exc_info), chunks) - - app_iter = self.app(environ, t.cast("StartResponse", checking_start_response)) - self.check_iterator(app_iter) - return GuardedIterator( - app_iter, t.cast(t.Tuple[int, Headers], headers_set), chunks - ) diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/profiler.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/profiler.py deleted file mode 100644 index 200dae0..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/middleware/profiler.py +++ /dev/null @@ -1,139 +0,0 @@ -""" -Application Profiler -==================== - -This module provides a middleware that profiles each request with the -:mod:`cProfile` module. This can help identify bottlenecks in your code -that may be slowing down your application. - -.. autoclass:: ProfilerMiddleware - -:copyright: 2007 Pallets -:license: BSD-3-Clause -""" -import os.path -import sys -import time -import typing as t -from pstats import Stats - -try: - from cProfile import Profile -except ImportError: - from profile import Profile # type: ignore - -if t.TYPE_CHECKING: - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - - -class ProfilerMiddleware: - """Wrap a WSGI application and profile the execution of each - request. Responses are buffered so that timings are more exact. - - If ``stream`` is given, :class:`pstats.Stats` are written to it - after each request. If ``profile_dir`` is given, :mod:`cProfile` - data files are saved to that directory, one file per request. - - The filename can be customized by passing ``filename_format``. If - it is a string, it will be formatted using :meth:`str.format` with - the following fields available: - - - ``{method}`` - The request method; GET, POST, etc. - - ``{path}`` - The request path or 'root' should one not exist. - - ``{elapsed}`` - The elapsed time of the request. - - ``{time}`` - The time of the request. - - If it is a callable, it will be called with the WSGI ``environ`` - dict and should return a filename. - - :param app: The WSGI application to wrap. - :param stream: Write stats to this stream. Disable with ``None``. - :param sort_by: A tuple of columns to sort stats by. See - :meth:`pstats.Stats.sort_stats`. - :param restrictions: A tuple of restrictions to filter stats by. See - :meth:`pstats.Stats.print_stats`. - :param profile_dir: Save profile data files to this directory. - :param filename_format: Format string for profile data file names, - or a callable returning a name. See explanation above. - - .. code-block:: python - - from werkzeug.middleware.profiler import ProfilerMiddleware - app = ProfilerMiddleware(app) - - .. 
versionchanged:: 0.15 - Stats are written even if ``profile_dir`` is given, and can be - disable by passing ``stream=None``. - - .. versionadded:: 0.15 - Added ``filename_format``. - - .. versionadded:: 0.9 - Added ``restrictions`` and ``profile_dir``. - """ - - def __init__( - self, - app: "WSGIApplication", - stream: t.IO[str] = sys.stdout, - sort_by: t.Iterable[str] = ("time", "calls"), - restrictions: t.Iterable[t.Union[str, int, float]] = (), - profile_dir: t.Optional[str] = None, - filename_format: str = "{method}.{path}.{elapsed:.0f}ms.{time:.0f}.prof", - ) -> None: - self._app = app - self._stream = stream - self._sort_by = sort_by - self._restrictions = restrictions - self._profile_dir = profile_dir - self._filename_format = filename_format - - def __call__( - self, environ: "WSGIEnvironment", start_response: "StartResponse" - ) -> t.Iterable[bytes]: - response_body: t.List[bytes] = [] - - def catching_start_response(status, headers, exc_info=None): # type: ignore - start_response(status, headers, exc_info) - return response_body.append - - def runapp() -> None: - app_iter = self._app( - environ, t.cast("StartResponse", catching_start_response) - ) - response_body.extend(app_iter) - - if hasattr(app_iter, "close"): - app_iter.close() # type: ignore - - profile = Profile() - start = time.time() - profile.runcall(runapp) - body = b"".join(response_body) - elapsed = time.time() - start - - if self._profile_dir is not None: - if callable(self._filename_format): - filename = self._filename_format(environ) - else: - filename = self._filename_format.format( - method=environ["REQUEST_METHOD"], - path=environ["PATH_INFO"].strip("/").replace("/", ".") or "root", - elapsed=elapsed * 1000.0, - time=time.time(), - ) - filename = os.path.join(self._profile_dir, filename) - profile.dump_stats(filename) - - if self._stream is not None: - stats = Stats(profile, stream=self._stream) - stats.sort_stats(*self._sort_by) - print("-" * 80, file=self._stream) - path_info = environ.get("PATH_INFO", "") - print(f"PATH: {path_info!r}", file=self._stream) - stats.print_stats(*self._restrictions) - print(f"{'-' * 80}\n", file=self._stream) - - return [body] diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/proxy_fix.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/proxy_fix.py deleted file mode 100644 index e90b1b3..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/middleware/proxy_fix.py +++ /dev/null @@ -1,187 +0,0 @@ -""" -X-Forwarded-For Proxy Fix -========================= - -This module provides a middleware that adjusts the WSGI environ based on -``X-Forwarded-`` headers that proxies in front of an application may -set. - -When an application is running behind a proxy server, WSGI may see the -request as coming from that server rather than the real client. Proxies -set various headers to track where the request actually came from. - -This middleware should only be used if the application is actually -behind such a proxy, and should be configured with the number of proxies -that are chained in front of it. Not all proxies set all the headers. -Since incoming headers can be faked, you must set how many proxies are -setting each header so the middleware knows what to trust. - -.. 
autoclass:: ProxyFix - -:copyright: 2007 Pallets -:license: BSD-3-Clause -""" -import typing as t - -from ..http import parse_list_header - -if t.TYPE_CHECKING: - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - - -class ProxyFix: - """Adjust the WSGI environ based on ``X-Forwarded-`` that proxies in - front of the application may set. - - - ``X-Forwarded-For`` sets ``REMOTE_ADDR``. - - ``X-Forwarded-Proto`` sets ``wsgi.url_scheme``. - - ``X-Forwarded-Host`` sets ``HTTP_HOST``, ``SERVER_NAME``, and - ``SERVER_PORT``. - - ``X-Forwarded-Port`` sets ``HTTP_HOST`` and ``SERVER_PORT``. - - ``X-Forwarded-Prefix`` sets ``SCRIPT_NAME``. - - You must tell the middleware how many proxies set each header so it - knows what values to trust. It is a security issue to trust values - that came from the client rather than a proxy. - - The original values of the headers are stored in the WSGI - environ as ``werkzeug.proxy_fix.orig``, a dict. - - :param app: The WSGI application to wrap. - :param x_for: Number of values to trust for ``X-Forwarded-For``. - :param x_proto: Number of values to trust for ``X-Forwarded-Proto``. - :param x_host: Number of values to trust for ``X-Forwarded-Host``. - :param x_port: Number of values to trust for ``X-Forwarded-Port``. - :param x_prefix: Number of values to trust for - ``X-Forwarded-Prefix``. - - .. code-block:: python - - from werkzeug.middleware.proxy_fix import ProxyFix - # App is behind one proxy that sets the -For and -Host headers. - app = ProxyFix(app, x_for=1, x_host=1) - - .. versionchanged:: 1.0 - Deprecated code has been removed: - - * The ``num_proxies`` argument and attribute. - * The ``get_remote_addr`` method. - * The environ keys ``orig_remote_addr``, - ``orig_wsgi_url_scheme``, and ``orig_http_host``. - - .. versionchanged:: 0.15 - All headers support multiple values. The ``num_proxies`` - argument is deprecated. Each header is configured with a - separate number of trusted proxies. - - .. versionchanged:: 0.15 - Original WSGI environ values are stored in the - ``werkzeug.proxy_fix.orig`` dict. ``orig_remote_addr``, - ``orig_wsgi_url_scheme``, and ``orig_http_host`` are deprecated - and will be removed in 1.0. - - .. versionchanged:: 0.15 - Support ``X-Forwarded-Port`` and ``X-Forwarded-Prefix``. - - .. versionchanged:: 0.15 - ``X-Forwarded-Host`` and ``X-Forwarded-Port`` modify - ``SERVER_NAME`` and ``SERVER_PORT``. - """ - - def __init__( - self, - app: "WSGIApplication", - x_for: int = 1, - x_proto: int = 1, - x_host: int = 0, - x_port: int = 0, - x_prefix: int = 0, - ) -> None: - self.app = app - self.x_for = x_for - self.x_proto = x_proto - self.x_host = x_host - self.x_port = x_port - self.x_prefix = x_prefix - - def _get_real_value(self, trusted: int, value: t.Optional[str]) -> t.Optional[str]: - """Get the real value from a list header based on the configured - number of trusted proxies. - - :param trusted: Number of values to trust in the header. - :param value: Comma separated list header value to parse. - :return: The real value, or ``None`` if there are fewer values - than the number of trusted proxies. - - .. versionchanged:: 1.0 - Renamed from ``_get_trusted_comma``. - - .. 
versionadded:: 0.15 - """ - if not (trusted and value): - return None - values = parse_list_header(value) - if len(values) >= trusted: - return values[-trusted] - return None - - def __call__( - self, environ: "WSGIEnvironment", start_response: "StartResponse" - ) -> t.Iterable[bytes]: - """Modify the WSGI environ based on the various ``Forwarded`` - headers before calling the wrapped application. Store the - original environ values in ``werkzeug.proxy_fix.orig_{key}``. - """ - environ_get = environ.get - orig_remote_addr = environ_get("REMOTE_ADDR") - orig_wsgi_url_scheme = environ_get("wsgi.url_scheme") - orig_http_host = environ_get("HTTP_HOST") - environ.update( - { - "werkzeug.proxy_fix.orig": { - "REMOTE_ADDR": orig_remote_addr, - "wsgi.url_scheme": orig_wsgi_url_scheme, - "HTTP_HOST": orig_http_host, - "SERVER_NAME": environ_get("SERVER_NAME"), - "SERVER_PORT": environ_get("SERVER_PORT"), - "SCRIPT_NAME": environ_get("SCRIPT_NAME"), - } - } - ) - - x_for = self._get_real_value(self.x_for, environ_get("HTTP_X_FORWARDED_FOR")) - if x_for: - environ["REMOTE_ADDR"] = x_for - - x_proto = self._get_real_value( - self.x_proto, environ_get("HTTP_X_FORWARDED_PROTO") - ) - if x_proto: - environ["wsgi.url_scheme"] = x_proto - - x_host = self._get_real_value(self.x_host, environ_get("HTTP_X_FORWARDED_HOST")) - if x_host: - environ["HTTP_HOST"] = x_host - parts = x_host.split(":", 1) - environ["SERVER_NAME"] = parts[0] - if len(parts) == 2: - environ["SERVER_PORT"] = parts[1] - - x_port = self._get_real_value(self.x_port, environ_get("HTTP_X_FORWARDED_PORT")) - if x_port: - host = environ.get("HTTP_HOST") - if host: - parts = host.split(":", 1) - host = parts[0] if len(parts) == 2 else host - environ["HTTP_HOST"] = f"{host}:{x_port}" - environ["SERVER_PORT"] = x_port - - x_prefix = self._get_real_value( - self.x_prefix, environ_get("HTTP_X_FORWARDED_PREFIX") - ) - if x_prefix: - environ["SCRIPT_NAME"] = x_prefix - - return self.app(environ, start_response) diff --git a/venv/lib/python3.8/site-packages/werkzeug/middleware/shared_data.py b/venv/lib/python3.8/site-packages/werkzeug/middleware/shared_data.py deleted file mode 100644 index 62da672..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/middleware/shared_data.py +++ /dev/null @@ -1,320 +0,0 @@ -""" -Serve Shared Static Files -========================= - -.. autoclass:: SharedDataMiddleware - :members: is_allowed - -:copyright: 2007 Pallets -:license: BSD-3-Clause -""" -import mimetypes -import os -import pkgutil -import posixpath -import typing as t -from datetime import datetime -from datetime import timezone -from io import BytesIO -from time import time -from zlib import adler32 - -from ..filesystem import get_filesystem_encoding -from ..http import http_date -from ..http import is_resource_modified -from ..security import safe_join -from ..utils import get_content_type -from ..wsgi import get_path_info -from ..wsgi import wrap_file - -_TOpener = t.Callable[[], t.Tuple[t.IO[bytes], datetime, int]] -_TLoader = t.Callable[[t.Optional[str]], t.Tuple[t.Optional[str], t.Optional[_TOpener]]] - -if t.TYPE_CHECKING: - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - - -class SharedDataMiddleware: - - """A WSGI middleware which provides static content for development - environments or simple server setups. 
Its usage is quite simple:: - - import os - from werkzeug.middleware.shared_data import SharedDataMiddleware - - app = SharedDataMiddleware(app, { - '/shared': os.path.join(os.path.dirname(__file__), 'shared') - }) - - The contents of the folder ``./shared`` will now be available on - ``http://example.com/shared/``. This is pretty useful during development - because a standalone media server is not required. Files can also be - mounted on the root folder and still continue to use the application because - the shared data middleware forwards all unhandled requests to the - application, even if the requests are below one of the shared folders. - - If `pkg_resources` is available you can also tell the middleware to serve - files from package data:: - - app = SharedDataMiddleware(app, { - '/static': ('myapplication', 'static') - }) - - This will then serve the ``static`` folder in the `myapplication` - Python package. - - The optional `disallow` parameter can be a list of :func:`~fnmatch.fnmatch` - rules for files that are not accessible from the web. If `cache` is set to - `False` no caching headers are sent. - - Currently the middleware does not support non-ASCII filenames. If the - encoding on the file system happens to match the encoding of the URI it may - work but this could also be by accident. We strongly suggest using ASCII - only file names for static files. - - The middleware will guess the mimetype using the Python `mimetype` - module. If it's unable to figure out the charset it will fall back - to `fallback_mimetype`. - - :param app: the application to wrap. If you don't want to wrap an - application you can pass it :exc:`NotFound`. - :param exports: a list or dict of exported files and folders. - :param disallow: a list of :func:`~fnmatch.fnmatch` rules. - :param cache: enable or disable caching headers. - :param cache_timeout: the cache timeout in seconds for the headers. - :param fallback_mimetype: The fallback mimetype for unknown files. - - .. versionchanged:: 1.0 - The default ``fallback_mimetype`` is - ``application/octet-stream``. If a filename looks like a text - mimetype, the ``utf-8`` charset is added to it. - - .. versionadded:: 0.6 - Added ``fallback_mimetype``. - - .. versionchanged:: 0.5 - Added ``cache_timeout``. - """ - - def __init__( - self, - app: "WSGIApplication", - exports: t.Union[ - t.Dict[str, t.Union[str, t.Tuple[str, str]]], - t.Iterable[t.Tuple[str, t.Union[str, t.Tuple[str, str]]]], - ], - disallow: None = None, - cache: bool = True, - cache_timeout: int = 60 * 60 * 12, - fallback_mimetype: str = "application/octet-stream", - ) -> None: - self.app = app - self.exports: t.List[t.Tuple[str, _TLoader]] = [] - self.cache = cache - self.cache_timeout = cache_timeout - - if isinstance(exports, dict): - exports = exports.items() - - for key, value in exports: - if isinstance(value, tuple): - loader = self.get_package_loader(*value) - elif isinstance(value, str): - if os.path.isfile(value): - loader = self.get_file_loader(value) - else: - loader = self.get_directory_loader(value) - else: - raise TypeError(f"unknown def {value!r}") - - self.exports.append((key, loader)) - - if disallow is not None: - from fnmatch import fnmatch - - self.is_allowed = lambda x: not fnmatch(x, disallow) - - self.fallback_mimetype = fallback_mimetype - - def is_allowed(self, filename: str) -> bool: - """Subclasses can override this method to disallow the access to - certain files. However by providing `disallow` in the constructor - this method is overwritten. 
- """ - return True - - def _opener(self, filename: str) -> _TOpener: - return lambda: ( - open(filename, "rb"), - datetime.fromtimestamp(os.path.getmtime(filename), tz=timezone.utc), - int(os.path.getsize(filename)), - ) - - def get_file_loader(self, filename: str) -> _TLoader: - return lambda x: (os.path.basename(filename), self._opener(filename)) - - def get_package_loader(self, package: str, package_path: str) -> _TLoader: - load_time = datetime.now(timezone.utc) - provider = pkgutil.get_loader(package) - - if hasattr(provider, "get_resource_reader"): - # Python 3 - reader = provider.get_resource_reader(package) # type: ignore - - def loader( - path: t.Optional[str], - ) -> t.Tuple[t.Optional[str], t.Optional[_TOpener]]: - if path is None: - return None, None - - path = safe_join(package_path, path) - - if path is None: - return None, None - - basename = posixpath.basename(path) - - try: - resource = reader.open_resource(path) - except OSError: - return None, None - - if isinstance(resource, BytesIO): - return ( - basename, - lambda: (resource, load_time, len(resource.getvalue())), - ) - - return ( - basename, - lambda: ( - resource, - datetime.fromtimestamp( - os.path.getmtime(resource.name), tz=timezone.utc - ), - os.path.getsize(resource.name), - ), - ) - - else: - # Python 3.6 - package_filename = provider.get_filename(package) # type: ignore - is_filesystem = os.path.exists(package_filename) - root = os.path.join(os.path.dirname(package_filename), package_path) - - def loader( - path: t.Optional[str], - ) -> t.Tuple[t.Optional[str], t.Optional[_TOpener]]: - if path is None: - return None, None - - path = safe_join(root, path) - - if path is None: - return None, None - - basename = posixpath.basename(path) - - if is_filesystem: - if not os.path.isfile(path): - return None, None - - return basename, self._opener(path) - - try: - data = provider.get_data(path) # type: ignore - except OSError: - return None, None - - return basename, lambda: (BytesIO(data), load_time, len(data)) - - return loader - - def get_directory_loader(self, directory: str) -> _TLoader: - def loader( - path: t.Optional[str], - ) -> t.Tuple[t.Optional[str], t.Optional[_TOpener]]: - if path is not None: - path = safe_join(directory, path) - - if path is None: - return None, None - else: - path = directory - - if os.path.isfile(path): - return os.path.basename(path), self._opener(path) - - return None, None - - return loader - - def generate_etag(self, mtime: datetime, file_size: int, real_filename: str) -> str: - if not isinstance(real_filename, bytes): - real_filename = real_filename.encode( # type: ignore - get_filesystem_encoding() - ) - - timestamp = mtime.timestamp() - checksum = adler32(real_filename) & 0xFFFFFFFF # type: ignore - return f"wzsdm-{timestamp}-{file_size}-{checksum}" - - def __call__( - self, environ: "WSGIEnvironment", start_response: "StartResponse" - ) -> t.Iterable[bytes]: - path = get_path_info(environ) - file_loader = None - - for search_path, loader in self.exports: - if search_path == path: - real_filename, file_loader = loader(None) - - if file_loader is not None: - break - - if not search_path.endswith("/"): - search_path += "/" - - if path.startswith(search_path): - real_filename, file_loader = loader(path[len(search_path) :]) - - if file_loader is not None: - break - - if file_loader is None or not self.is_allowed(real_filename): # type: ignore - return self.app(environ, start_response) - - guessed_type = mimetypes.guess_type(real_filename) # type: ignore - mime_type = 
get_content_type(guessed_type[0] or self.fallback_mimetype, "utf-8") - f, mtime, file_size = file_loader() - - headers = [("Date", http_date())] - - if self.cache: - timeout = self.cache_timeout - etag = self.generate_etag(mtime, file_size, real_filename) # type: ignore - headers += [ - ("Etag", f'"{etag}"'), - ("Cache-Control", f"max-age={timeout}, public"), - ] - - if not is_resource_modified(environ, etag, last_modified=mtime): - f.close() - start_response("304 Not Modified", headers) - return [] - - headers.append(("Expires", http_date(time() + timeout))) - else: - headers.append(("Cache-Control", "public")) - - headers.extend( - ( - ("Content-Type", mime_type), - ("Content-Length", str(file_size)), - ("Last-Modified", http_date(mtime)), - ) - ) - start_response("200 OK", headers) - return wrap_file(environ, f) diff --git a/venv/lib/python3.8/site-packages/werkzeug/py.typed b/venv/lib/python3.8/site-packages/werkzeug/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.8/site-packages/werkzeug/routing.py b/venv/lib/python3.8/site-packages/werkzeug/routing.py deleted file mode 100644 index ddb8ef9..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/routing.py +++ /dev/null @@ -1,2341 +0,0 @@ -"""When it comes to combining multiple controller or view functions -(however you want to call them) you need a dispatcher. A simple way -would be applying regular expression tests on the ``PATH_INFO`` and -calling registered callback functions that return the value then. - -This module implements a much more powerful system than simple regular -expression matching because it can also convert values in the URLs and -build URLs. - -Here a simple example that creates a URL map for an application with -two subdomains (www and kb) and some URL rules: - -.. code-block:: python - - m = Map([ - # Static URLs - Rule('/', endpoint='static/index'), - Rule('/about', endpoint='static/about'), - Rule('/help', endpoint='static/help'), - # Knowledge Base - Subdomain('kb', [ - Rule('/', endpoint='kb/index'), - Rule('/browse/', endpoint='kb/browse'), - Rule('/browse//', endpoint='kb/browse'), - Rule('/browse//', endpoint='kb/browse') - ]) - ], default_subdomain='www') - -If the application doesn't use subdomains it's perfectly fine to not set -the default subdomain and not use the `Subdomain` rule factory. The -endpoint in the rules can be anything, for example import paths or -unique identifiers. The WSGI application can use those endpoints to get the -handler for that URL. It doesn't have to be a string at all but it's -recommended. - -Now it's possible to create a URL adapter for one of the subdomains and -build URLs: - -.. code-block:: python - - c = m.bind('example.com') - - c.build("kb/browse", dict(id=42)) - 'http://kb.example.com/browse/42/' - - c.build("kb/browse", dict()) - 'http://kb.example.com/browse/' - - c.build("kb/browse", dict(id=42, page=3)) - 'http://kb.example.com/browse/42/3' - - c.build("static/about") - '/about' - - c.build("static/index", force_external=True) - 'http://www.example.com/' - - c = m.bind('example.com', subdomain='kb') - - c.build("static/about") - 'http://www.example.com/about' - -The first argument to bind is the server name *without* the subdomain. -Per default it will assume that the script is mounted on the root, but -often that's not the case so you can provide the real mount point as -second argument: - -.. 
code-block:: python - - c = m.bind('example.com', '/applications/example') - -The third argument can be the subdomain, if not given the default -subdomain is used. For more details about binding have a look at the -documentation of the `MapAdapter`. - -And here is how you can match URLs: - -.. code-block:: python - - c = m.bind('example.com') - - c.match("/") - ('static/index', {}) - - c.match("/about") - ('static/about', {}) - - c = m.bind('example.com', '/', 'kb') - - c.match("/") - ('kb/index', {}) - - c.match("/browse/42/23") - ('kb/browse', {'id': 42, 'page': 23}) - -If matching fails you get a ``NotFound`` exception, if the rule thinks -it's a good idea to redirect (for example because the URL was defined -to have a slash at the end but the request was missing that slash) it -will raise a ``RequestRedirect`` exception. Both are subclasses of -``HTTPException`` so you can use those errors as responses in the -application. - -If matching succeeded but the URL rule was incompatible to the given -method (for example there were only rules for ``GET`` and ``HEAD`` but -routing tried to match a ``POST`` request) a ``MethodNotAllowed`` -exception is raised. -""" -import ast -import difflib -import posixpath -import re -import typing -import typing as t -import uuid -import warnings -from pprint import pformat -from string import Template -from threading import Lock -from types import CodeType - -from ._internal import _encode_idna -from ._internal import _get_environ -from ._internal import _to_bytes -from ._internal import _to_str -from ._internal import _wsgi_decoding_dance -from .datastructures import ImmutableDict -from .datastructures import MultiDict -from .exceptions import BadHost -from .exceptions import BadRequest -from .exceptions import HTTPException -from .exceptions import MethodNotAllowed -from .exceptions import NotFound -from .urls import _fast_url_quote -from .urls import url_encode -from .urls import url_join -from .urls import url_quote -from .urls import url_unquote -from .utils import cached_property -from .utils import redirect -from .wsgi import get_host - -if t.TYPE_CHECKING: - import typing_extensions as te - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - from .wrappers.response import Response - -_rule_re = re.compile( - r""" - (?P[^<]*) # static rule data - < - (?: - (?P[a-zA-Z_][a-zA-Z0-9_]*) # converter name - (?:\((?P.*?)\))? # converter arguments - \: # variable delimiter - )? - (?P[a-zA-Z_][a-zA-Z0-9_]*) # variable name - > - """, - re.VERBOSE, -) -_simple_rule_re = re.compile(r"<([^>]+)>") -_converter_args_re = re.compile( - r""" - ((?P\w+)\s*=\s*)? 
- (?P - True|False| - \d+.\d+| - \d+.| - \d+| - [\w\d_.]+| - [urUR]?(?P"[^"]*?"|'[^']*') - )\s*, - """, - re.VERBOSE, -) - - -_PYTHON_CONSTANTS = {"None": None, "True": True, "False": False} - - -def _pythonize(value: str) -> t.Union[None, bool, int, float, str]: - if value in _PYTHON_CONSTANTS: - return _PYTHON_CONSTANTS[value] - for convert in int, float: - try: - return convert(value) # type: ignore - except ValueError: - pass - if value[:1] == value[-1:] and value[0] in "\"'": - value = value[1:-1] - return str(value) - - -def parse_converter_args(argstr: str) -> t.Tuple[t.Tuple, t.Dict[str, t.Any]]: - argstr += "," - args = [] - kwargs = {} - - for item in _converter_args_re.finditer(argstr): - value = item.group("stringval") - if value is None: - value = item.group("value") - value = _pythonize(value) - if not item.group("name"): - args.append(value) - else: - name = item.group("name") - kwargs[name] = value - - return tuple(args), kwargs - - -def parse_rule(rule: str) -> t.Iterator[t.Tuple[t.Optional[str], t.Optional[str], str]]: - """Parse a rule and return it as generator. Each iteration yields tuples - in the form ``(converter, arguments, variable)``. If the converter is - `None` it's a static url part, otherwise it's a dynamic one. - - :internal: - """ - pos = 0 - end = len(rule) - do_match = _rule_re.match - used_names = set() - while pos < end: - m = do_match(rule, pos) - if m is None: - break - data = m.groupdict() - if data["static"]: - yield None, None, data["static"] - variable = data["variable"] - converter = data["converter"] or "default" - if variable in used_names: - raise ValueError(f"variable name {variable!r} used twice.") - used_names.add(variable) - yield converter, data["args"] or None, variable - pos = m.end() - if pos < end: - remaining = rule[pos:] - if ">" in remaining or "<" in remaining: - raise ValueError(f"malformed url rule: {rule!r}") - yield None, None, remaining - - -class RoutingException(Exception): - """Special exceptions that require the application to redirect, notifying - about missing urls, etc. - - :internal: - """ - - -class RequestRedirect(HTTPException, RoutingException): - """Raise if the map requests a redirect. This is for example the case if - `strict_slashes` are activated and an url that requires a trailing slash. - - The attribute `new_url` contains the absolute destination url. - """ - - code = 308 - - def __init__(self, new_url: str) -> None: - super().__init__(new_url) - self.new_url = new_url - - def get_response( - self, - environ: t.Optional["WSGIEnvironment"] = None, - scope: t.Optional[dict] = None, - ) -> "Response": - return redirect(self.new_url, self.code) - - -class RequestPath(RoutingException): - """Internal exception.""" - - __slots__ = ("path_info",) - - def __init__(self, path_info: str) -> None: - super().__init__() - self.path_info = path_info - - -class RequestAliasRedirect(RoutingException): # noqa: B903 - """This rule is an alias and wants to redirect to the canonical URL.""" - - def __init__(self, matched_values: t.Mapping[str, t.Any]) -> None: - super().__init__() - self.matched_values = matched_values - - -class BuildError(RoutingException, LookupError): - """Raised if the build system cannot find a URL for an endpoint with the - values provided. 
- """ - - def __init__( - self, - endpoint: str, - values: t.Mapping[str, t.Any], - method: t.Optional[str], - adapter: t.Optional["MapAdapter"] = None, - ) -> None: - super().__init__(endpoint, values, method) - self.endpoint = endpoint - self.values = values - self.method = method - self.adapter = adapter - - @cached_property - def suggested(self) -> t.Optional["Rule"]: - return self.closest_rule(self.adapter) - - def closest_rule(self, adapter: t.Optional["MapAdapter"]) -> t.Optional["Rule"]: - def _score_rule(rule: "Rule") -> float: - return sum( - [ - 0.98 - * difflib.SequenceMatcher( - None, rule.endpoint, self.endpoint - ).ratio(), - 0.01 * bool(set(self.values or ()).issubset(rule.arguments)), - 0.01 * bool(rule.methods and self.method in rule.methods), - ] - ) - - if adapter and adapter.map._rules: - return max(adapter.map._rules, key=_score_rule) - - return None - - def __str__(self) -> str: - message = [f"Could not build url for endpoint {self.endpoint!r}"] - if self.method: - message.append(f" ({self.method!r})") - if self.values: - message.append(f" with values {sorted(self.values)!r}") - message.append(".") - if self.suggested: - if self.endpoint == self.suggested.endpoint: - if ( - self.method - and self.suggested.methods is not None - and self.method not in self.suggested.methods - ): - message.append( - " Did you mean to use methods" - f" {sorted(self.suggested.methods)!r}?" - ) - missing_values = self.suggested.arguments.union( - set(self.suggested.defaults or ()) - ) - set(self.values.keys()) - if missing_values: - message.append( - f" Did you forget to specify values {sorted(missing_values)!r}?" - ) - else: - message.append(f" Did you mean {self.suggested.endpoint!r} instead?") - return "".join(message) - - -class WebsocketMismatch(BadRequest): - """The only matched rule is either a WebSocket and the request is - HTTP, or the rule is HTTP and the request is a WebSocket. - """ - - -class ValidationError(ValueError): - """Validation error. If a rule converter raises this exception the rule - does not match the current URL and the next URL is tried. - """ - - -class RuleFactory: - """As soon as you have more complex URL setups it's a good idea to use rule - factories to avoid repetitive tasks. Some of them are builtin, others can - be added by subclassing `RuleFactory` and overriding `get_rules`. - """ - - def get_rules(self, map: "Map") -> t.Iterable["Rule"]: - """Subclasses of `RuleFactory` have to override this method and return - an iterable of rules.""" - raise NotImplementedError() - - -class Subdomain(RuleFactory): - """All URLs provided by this factory have the subdomain set to a - specific domain. For example if you want to use the subdomain for - the current language this can be a good setup:: - - url_map = Map([ - Rule('/', endpoint='#select_language'), - Subdomain('', [ - Rule('/', endpoint='index'), - Rule('/about', endpoint='about'), - Rule('/help', endpoint='help') - ]) - ]) - - All the rules except for the ``'#select_language'`` endpoint will now - listen on a two letter long subdomain that holds the language code - for the current request. 
- """ - - def __init__(self, subdomain: str, rules: t.Iterable[RuleFactory]) -> None: - self.subdomain = subdomain - self.rules = rules - - def get_rules(self, map: "Map") -> t.Iterator["Rule"]: - for rulefactory in self.rules: - for rule in rulefactory.get_rules(map): - rule = rule.empty() - rule.subdomain = self.subdomain - yield rule - - -class Submount(RuleFactory): - """Like `Subdomain` but prefixes the URL rule with a given string:: - - url_map = Map([ - Rule('/', endpoint='index'), - Submount('/blog', [ - Rule('/', endpoint='blog/index'), - Rule('/entry/', endpoint='blog/show') - ]) - ]) - - Now the rule ``'blog/show'`` matches ``/blog/entry/``. - """ - - def __init__(self, path: str, rules: t.Iterable[RuleFactory]) -> None: - self.path = path.rstrip("/") - self.rules = rules - - def get_rules(self, map: "Map") -> t.Iterator["Rule"]: - for rulefactory in self.rules: - for rule in rulefactory.get_rules(map): - rule = rule.empty() - rule.rule = self.path + rule.rule - yield rule - - -class EndpointPrefix(RuleFactory): - """Prefixes all endpoints (which must be strings for this factory) with - another string. This can be useful for sub applications:: - - url_map = Map([ - Rule('/', endpoint='index'), - EndpointPrefix('blog/', [Submount('/blog', [ - Rule('/', endpoint='index'), - Rule('/entry/', endpoint='show') - ])]) - ]) - """ - - def __init__(self, prefix: str, rules: t.Iterable[RuleFactory]) -> None: - self.prefix = prefix - self.rules = rules - - def get_rules(self, map: "Map") -> t.Iterator["Rule"]: - for rulefactory in self.rules: - for rule in rulefactory.get_rules(map): - rule = rule.empty() - rule.endpoint = self.prefix + rule.endpoint - yield rule - - -class RuleTemplate: - """Returns copies of the rules wrapped and expands string templates in - the endpoint, rule, defaults or subdomain sections. - - Here a small example for such a rule template:: - - from werkzeug.routing import Map, Rule, RuleTemplate - - resource = RuleTemplate([ - Rule('/$name/', endpoint='$name.list'), - Rule('/$name/', endpoint='$name.show') - ]) - - url_map = Map([resource(name='user'), resource(name='page')]) - - When a rule template is called the keyword arguments are used to - replace the placeholders in all the string parameters. - """ - - def __init__(self, rules: t.Iterable["Rule"]) -> None: - self.rules = list(rules) - - def __call__(self, *args: t.Any, **kwargs: t.Any) -> "RuleTemplateFactory": - return RuleTemplateFactory(self.rules, dict(*args, **kwargs)) - - -class RuleTemplateFactory(RuleFactory): - """A factory that fills in template variables into rules. Used by - `RuleTemplate` internally. 
- - :internal: - """ - - def __init__( - self, rules: t.Iterable[RuleFactory], context: t.Dict[str, t.Any] - ) -> None: - self.rules = rules - self.context = context - - def get_rules(self, map: "Map") -> t.Iterator["Rule"]: - for rulefactory in self.rules: - for rule in rulefactory.get_rules(map): - new_defaults = subdomain = None - if rule.defaults: - new_defaults = {} - for key, value in rule.defaults.items(): - if isinstance(value, str): - value = Template(value).substitute(self.context) - new_defaults[key] = value - if rule.subdomain is not None: - subdomain = Template(rule.subdomain).substitute(self.context) - new_endpoint = rule.endpoint - if isinstance(new_endpoint, str): - new_endpoint = Template(new_endpoint).substitute(self.context) - yield Rule( - Template(rule.rule).substitute(self.context), - new_defaults, - subdomain, - rule.methods, - rule.build_only, - new_endpoint, - rule.strict_slashes, - ) - - -def _prefix_names(src: str) -> ast.stmt: - """ast parse and prefix names with `.` to avoid collision with user vars""" - tree = ast.parse(src).body[0] - if isinstance(tree, ast.Expr): - tree = tree.value # type: ignore - for node in ast.walk(tree): - if isinstance(node, ast.Name): - node.id = f".{node.id}" - return tree - - -_CALL_CONVERTER_CODE_FMT = "self._converters[{elem!r}].to_url()" -_IF_KWARGS_URL_ENCODE_CODE = """\ -if kwargs: - q = '?' - params = self._encode_query_vars(kwargs) -else: - q = params = '' -""" -_IF_KWARGS_URL_ENCODE_AST = _prefix_names(_IF_KWARGS_URL_ENCODE_CODE) -_URL_ENCODE_AST_NAMES = (_prefix_names("q"), _prefix_names("params")) - - -class Rule(RuleFactory): - """A Rule represents one URL pattern. There are some options for `Rule` - that change the way it behaves and are passed to the `Rule` constructor. - Note that besides the rule-string all arguments *must* be keyword arguments - in order to not break the application on Werkzeug upgrades. - - `string` - Rule strings basically are just normal URL paths with placeholders in - the format ```` where the converter and the - arguments are optional. If no converter is defined the `default` - converter is used which means `string` in the normal configuration. - - URL rules that end with a slash are branch URLs, others are leaves. - If you have `strict_slashes` enabled (which is the default), all - branch URLs that are matched without a trailing slash will trigger a - redirect to the same URL with the missing slash appended. - - The converters are defined on the `Map`. - - `endpoint` - The endpoint for this rule. This can be anything. A reference to a - function, a string, a number etc. The preferred way is using a string - because the endpoint is used for URL generation. - - `defaults` - An optional dict with defaults for other rules with the same endpoint. - This is a bit tricky but useful if you want to have unique URLs:: - - url_map = Map([ - Rule('/all/', defaults={'page': 1}, endpoint='all_entries'), - Rule('/all/page/', endpoint='all_entries') - ]) - - If a user now visits ``http://example.com/all/page/1`` he will be - redirected to ``http://example.com/all/``. If `redirect_defaults` is - disabled on the `Map` instance this will only affect the URL - generation. - - `subdomain` - The subdomain rule string for this rule. If not specified the rule - only matches for the `default_subdomain` of the map. If the map is - not bound to a subdomain this feature is disabled. 
- - Can be useful if you want to have user profiles on different subdomains - and all subdomains are forwarded to your application:: - - url_map = Map([ - Rule('/', subdomain='', endpoint='user/homepage'), - Rule('/stats', subdomain='', endpoint='user/stats') - ]) - - `methods` - A sequence of http methods this rule applies to. If not specified, all - methods are allowed. For example this can be useful if you want different - endpoints for `POST` and `GET`. If methods are defined and the path - matches but the method matched against is not in this list or in the - list of another rule for that path the error raised is of the type - `MethodNotAllowed` rather than `NotFound`. If `GET` is present in the - list of methods and `HEAD` is not, `HEAD` is added automatically. - - `strict_slashes` - Override the `Map` setting for `strict_slashes` only for this rule. If - not specified the `Map` setting is used. - - `merge_slashes` - Override :attr:`Map.merge_slashes` for this rule. - - `build_only` - Set this to True and the rule will never match but will create a URL - that can be build. This is useful if you have resources on a subdomain - or folder that are not handled by the WSGI application (like static data) - - `redirect_to` - If given this must be either a string or callable. In case of a - callable it's called with the url adapter that triggered the match and - the values of the URL as keyword arguments and has to return the target - for the redirect, otherwise it has to be a string with placeholders in - rule syntax:: - - def foo_with_slug(adapter, id): - # ask the database for the slug for the old id. this of - # course has nothing to do with werkzeug. - return f'foo/{Foo.get_slug_for_id(id)}' - - url_map = Map([ - Rule('/foo/', endpoint='foo'), - Rule('/some/old/url/', redirect_to='foo/'), - Rule('/other/old/url/', redirect_to=foo_with_slug) - ]) - - When the rule is matched the routing system will raise a - `RequestRedirect` exception with the target for the redirect. - - Keep in mind that the URL will be joined against the URL root of the - script so don't use a leading slash on the target URL unless you - really mean root of that domain. - - `alias` - If enabled this rule serves as an alias for another rule with the same - endpoint and arguments. - - `host` - If provided and the URL map has host matching enabled this can be - used to provide a match rule for the whole host. This also means - that the subdomain feature is disabled. - - `websocket` - If ``True``, this rule is only matches for WebSocket (``ws://``, - ``wss://``) requests. By default, rules will only match for HTTP - requests. - - .. versionadded:: 1.0 - Added ``websocket``. - - .. versionadded:: 1.0 - Added ``merge_slashes``. - - .. versionadded:: 0.7 - Added ``alias`` and ``host``. - - .. versionchanged:: 0.6.1 - ``HEAD`` is added to ``methods`` if ``GET`` is present. 
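[Editor's sketch, not part of the patch] A short illustration of the `defaults` and `methods` options described in the Rule docstring above, again assuming the werkzeug 2.0.2 API from the removed vendored file (the endpoints are illustrative):

    from werkzeug.routing import Map, Rule

    url_map = Map([
        # /all/page/1 redirects to the canonical /all/ because of the defaults;
        # other page numbers match normally.
        Rule("/all/", defaults={"page": 1}, endpoint="all_entries"),
        Rule("/all/page/<int:page>", endpoint="all_entries"),
        # GET is not listed, so GET /submit raises MethodNotAllowed, not NotFound.
        Rule("/submit", methods=["POST"], endpoint="submit"),
    ])

    urls = url_map.bind("example.com")
    assert urls.match("/all/page/2") == ("all_entries", {"page": 2})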
- """ - - def __init__( - self, - string: str, - defaults: t.Optional[t.Mapping[str, t.Any]] = None, - subdomain: t.Optional[str] = None, - methods: t.Optional[t.Iterable[str]] = None, - build_only: bool = False, - endpoint: t.Optional[str] = None, - strict_slashes: t.Optional[bool] = None, - merge_slashes: t.Optional[bool] = None, - redirect_to: t.Optional[t.Union[str, t.Callable[..., str]]] = None, - alias: bool = False, - host: t.Optional[str] = None, - websocket: bool = False, - ) -> None: - if not string.startswith("/"): - raise ValueError("urls must start with a leading slash") - self.rule = string - self.is_leaf = not string.endswith("/") - - self.map: "Map" = None # type: ignore - self.strict_slashes = strict_slashes - self.merge_slashes = merge_slashes - self.subdomain = subdomain - self.host = host - self.defaults = defaults - self.build_only = build_only - self.alias = alias - self.websocket = websocket - - if methods is not None: - if isinstance(methods, str): - raise TypeError("'methods' should be a list of strings.") - - methods = {x.upper() for x in methods} - - if "HEAD" not in methods and "GET" in methods: - methods.add("HEAD") - - if websocket and methods - {"GET", "HEAD", "OPTIONS"}: - raise ValueError( - "WebSocket rules can only use 'GET', 'HEAD', and 'OPTIONS' methods." - ) - - self.methods = methods - self.endpoint: str = endpoint # type: ignore - self.redirect_to = redirect_to - - if defaults: - self.arguments = set(map(str, defaults)) - else: - self.arguments = set() - - self._trace: t.List[t.Tuple[bool, str]] = [] - - def empty(self) -> "Rule": - """ - Return an unbound copy of this rule. - - This can be useful if want to reuse an already bound URL for another - map. See ``get_empty_kwargs`` to override what keyword arguments are - provided to the new copy. - """ - return type(self)(self.rule, **self.get_empty_kwargs()) - - def get_empty_kwargs(self) -> t.Mapping[str, t.Any]: - """ - Provides kwargs for instantiating empty copy with empty() - - Use this method to provide custom keyword arguments to the subclass of - ``Rule`` when calling ``some_rule.empty()``. Helpful when the subclass - has custom keyword arguments that are needed at instantiation. - - Must return a ``dict`` that will be provided as kwargs to the new - instance of ``Rule``, following the initial ``self.rule`` value which - is always provided as the first, required positional argument. - """ - defaults = None - if self.defaults: - defaults = dict(self.defaults) - return dict( - defaults=defaults, - subdomain=self.subdomain, - methods=self.methods, - build_only=self.build_only, - endpoint=self.endpoint, - strict_slashes=self.strict_slashes, - redirect_to=self.redirect_to, - alias=self.alias, - host=self.host, - ) - - def get_rules(self, map: "Map") -> t.Iterator["Rule"]: - yield self - - def refresh(self) -> None: - """Rebinds and refreshes the URL. Call this if you modified the - rule in place. - - :internal: - """ - self.bind(self.map, rebind=True) - - def bind(self, map: "Map", rebind: bool = False) -> None: - """Bind the url to a map and create a regular expression based on - the information from the rule itself and the defaults from the map. 
- - :internal: - """ - if self.map is not None and not rebind: - raise RuntimeError(f"url rule {self!r} already bound to map {self.map!r}") - self.map = map - if self.strict_slashes is None: - self.strict_slashes = map.strict_slashes - if self.merge_slashes is None: - self.merge_slashes = map.merge_slashes - if self.subdomain is None: - self.subdomain = map.default_subdomain - self.compile() - - def get_converter( - self, - variable_name: str, - converter_name: str, - args: t.Tuple, - kwargs: t.Mapping[str, t.Any], - ) -> "BaseConverter": - """Looks up the converter for the given parameter. - - .. versionadded:: 0.9 - """ - if converter_name not in self.map.converters: - raise LookupError(f"the converter {converter_name!r} does not exist") - return self.map.converters[converter_name](self.map, *args, **kwargs) - - def _encode_query_vars(self, query_vars: t.Mapping[str, t.Any]) -> str: - return url_encode( - query_vars, - charset=self.map.charset, - sort=self.map.sort_parameters, - key=self.map.sort_key, - ) - - def compile(self) -> None: - """Compiles the regular expression and stores it.""" - assert self.map is not None, "rule not bound" - - if self.map.host_matching: - domain_rule = self.host or "" - else: - domain_rule = self.subdomain or "" - - self._trace = [] - self._converters: t.Dict[str, "BaseConverter"] = {} - self._static_weights: t.List[t.Tuple[int, int]] = [] - self._argument_weights: t.List[int] = [] - regex_parts = [] - - def _build_regex(rule: str) -> None: - index = 0 - for converter, arguments, variable in parse_rule(rule): - if converter is None: - for match in re.finditer(r"/+|[^/]+", variable): - part = match.group(0) - if part.startswith("/"): - if self.merge_slashes: - regex_parts.append(r"/+?") - self._trace.append((False, "/")) - else: - regex_parts.append(part) - self._trace.append((False, part)) - continue - self._trace.append((False, part)) - regex_parts.append(re.escape(part)) - if part: - self._static_weights.append((index, -len(part))) - else: - if arguments: - c_args, c_kwargs = parse_converter_args(arguments) - else: - c_args = () - c_kwargs = {} - convobj = self.get_converter(variable, converter, c_args, c_kwargs) - regex_parts.append(f"(?P<{variable}>{convobj.regex})") - self._converters[variable] = convobj - self._trace.append((True, variable)) - self._argument_weights.append(convobj.weight) - self.arguments.add(str(variable)) - index = index + 1 - - _build_regex(domain_rule) - regex_parts.append("\\|") - self._trace.append((False, "|")) - _build_regex(self.rule if self.is_leaf else self.rule.rstrip("/")) - if not self.is_leaf: - self._trace.append((False, "/")) - - self._build: t.Callable[..., t.Tuple[str, str]] - self._build = self._compile_builder(False).__get__(self, None) # type: ignore - self._build_unknown: t.Callable[..., t.Tuple[str, str]] - self._build_unknown = self._compile_builder(True).__get__( # type: ignore - self, None - ) - - if self.build_only: - return - - if not (self.is_leaf and self.strict_slashes): - reps = "*" if self.merge_slashes else "?" - tail = f"(?/{reps})" - else: - tail = "" - - regex = f"^{''.join(regex_parts)}{tail}$" - self._regex = re.compile(regex) - - def match( - self, path: str, method: t.Optional[str] = None - ) -> t.Optional[t.MutableMapping[str, t.Any]]: - """Check if the rule matches a given path. Path is a string in the - form ``"subdomain|/path"`` and is assembled by the map. If - the map is doing host matching the subdomain part will be the host - instead. 
- - If the rule matches a dict with the converted values is returned, - otherwise the return value is `None`. - - :internal: - """ - if not self.build_only: - require_redirect = False - - m = self._regex.search(path) - if m is not None: - groups = m.groupdict() - # we have a folder like part of the url without a trailing - # slash and strict slashes enabled. raise an exception that - # tells the map to redirect to the same url but with a - # trailing slash - if ( - self.strict_slashes - and not self.is_leaf - and not groups.pop("__suffix__") - and ( - method is None or self.methods is None or method in self.methods - ) - ): - path += "/" - require_redirect = True - # if we are not in strict slashes mode we have to remove - # a __suffix__ - elif not self.strict_slashes: - del groups["__suffix__"] - - result = {} - for name, value in groups.items(): - try: - value = self._converters[name].to_python(value) - except ValidationError: - return None - result[str(name)] = value - if self.defaults: - result.update(self.defaults) - - if self.merge_slashes: - new_path = "|".join(self.build(result, False)) # type: ignore - if path.endswith("/") and not new_path.endswith("/"): - new_path += "/" - if new_path.count("/") < path.count("/"): - # The URL will be encoded when MapAdapter.match - # handles the RequestPath raised below. Decode - # the URL here to avoid a double encoding. - path = url_unquote(new_path) - require_redirect = True - - if require_redirect: - path = path.split("|", 1)[1] - raise RequestPath(path) - - if self.alias and self.map.redirect_defaults: - raise RequestAliasRedirect(result) - - return result - - return None - - @staticmethod - def _get_func_code(code: CodeType, name: str) -> t.Callable[..., t.Tuple[str, str]]: - globs: t.Dict[str, t.Any] = {} - locs: t.Dict[str, t.Any] = {} - exec(code, globs, locs) - return locs[name] # type: ignore - - def _compile_builder( - self, append_unknown: bool = True - ) -> t.Callable[..., t.Tuple[str, str]]: - defaults = self.defaults or {} - dom_ops: t.List[t.Tuple[bool, str]] = [] - url_ops: t.List[t.Tuple[bool, str]] = [] - - opl = dom_ops - for is_dynamic, data in self._trace: - if data == "|" and opl is dom_ops: - opl = url_ops - continue - # this seems like a silly case to ever come up but: - # if a default is given for a value that appears in the rule, - # resolve it to a constant ahead of time - if is_dynamic and data in defaults: - data = self._converters[data].to_url(defaults[data]) - opl.append((False, data)) - elif not is_dynamic: - opl.append( - (False, url_quote(_to_bytes(data, self.map.charset), safe="/:|+")) - ) - else: - opl.append((True, data)) - - def _convert(elem: str) -> ast.stmt: - ret = _prefix_names(_CALL_CONVERTER_CODE_FMT.format(elem=elem)) - ret.args = [ast.Name(str(elem), ast.Load())] # type: ignore # str for py2 - return ret - - def _parts(ops: t.List[t.Tuple[bool, str]]) -> t.List[ast.AST]: - parts = [ - _convert(elem) if is_dynamic else ast.Str(s=elem) - for is_dynamic, elem in ops - ] - parts = parts or [ast.Str("")] - # constant fold - ret = [parts[0]] - for p in parts[1:]: - if isinstance(p, ast.Str) and isinstance(ret[-1], ast.Str): - ret[-1] = ast.Str(ret[-1].s + p.s) - else: - ret.append(p) - return ret - - dom_parts = _parts(dom_ops) - url_parts = _parts(url_ops) - if not append_unknown: - body = [] - else: - body = [_IF_KWARGS_URL_ENCODE_AST] - url_parts.extend(_URL_ENCODE_AST_NAMES) - - def _join(parts: t.List[ast.AST]) -> ast.AST: - if len(parts) == 1: # shortcut - return parts[0] - return 
ast.JoinedStr(parts) - - body.append( - ast.Return(ast.Tuple([_join(dom_parts), _join(url_parts)], ast.Load())) - ) - - pargs = [ - elem - for is_dynamic, elem in dom_ops + url_ops - if is_dynamic and elem not in defaults - ] - kargs = [str(k) for k in defaults] - - func_ast: ast.FunctionDef = _prefix_names("def _(): pass") # type: ignore - func_ast.name = f"" - func_ast.args.args.append(ast.arg(".self", None)) - for arg in pargs + kargs: - func_ast.args.args.append(ast.arg(arg, None)) - func_ast.args.kwarg = ast.arg(".kwargs", None) - for _ in kargs: - func_ast.args.defaults.append(ast.Str("")) - func_ast.body = body - - # use `ast.parse` instead of `ast.Module` for better portability - # Python 3.8 changes the signature of `ast.Module` - module = ast.parse("") - module.body = [func_ast] - - # mark everything as on line 1, offset 0 - # less error-prone than `ast.fix_missing_locations` - # bad line numbers cause an assert to fail in debug builds - for node in ast.walk(module): - if "lineno" in node._attributes: - node.lineno = 1 - if "col_offset" in node._attributes: - node.col_offset = 0 - - code = compile(module, "", "exec") - return self._get_func_code(code, func_ast.name) - - def build( - self, values: t.Mapping[str, t.Any], append_unknown: bool = True - ) -> t.Optional[t.Tuple[str, str]]: - """Assembles the relative url for that rule and the subdomain. - If building doesn't work for some reasons `None` is returned. - - :internal: - """ - try: - if append_unknown: - return self._build_unknown(**values) - else: - return self._build(**values) - except ValidationError: - return None - - def provides_defaults_for(self, rule: "Rule") -> bool: - """Check if this rule has defaults for a given rule. - - :internal: - """ - return bool( - not self.build_only - and self.defaults - and self.endpoint == rule.endpoint - and self != rule - and self.arguments == rule.arguments - ) - - def suitable_for( - self, values: t.Mapping[str, t.Any], method: t.Optional[str] = None - ) -> bool: - """Check if the dict of values has enough data for url generation. - - :internal: - """ - # if a method was given explicitly and that method is not supported - # by this rule, this rule is not suitable. - if ( - method is not None - and self.methods is not None - and method not in self.methods - ): - return False - - defaults = self.defaults or () - - # all arguments required must be either in the defaults dict or - # the value dictionary otherwise it's not suitable - for key in self.arguments: - if key not in defaults and key not in values: - return False - - # in case defaults are given we ensure that either the value was - # skipped or the value is the same as the default value. - if defaults: - for key, value in defaults.items(): - if key in values and value != values[key]: - return False - - return True - - def match_compare_key( - self, - ) -> t.Tuple[bool, int, t.Iterable[t.Tuple[int, int]], int, t.Iterable[int]]: - """The match compare key for sorting. - - Current implementation: - - 1. rules without any arguments come first for performance - reasons only as we expect them to match faster and some - common ones usually don't have any arguments (index pages etc.) - 2. rules with more static parts come first so the second argument - is the negative length of the number of the static weights. - 3. we order by static weights, which is a combination of index - and length - 4. The more complex rules come first so the next argument is the - negative length of the number of argument weights. - 5. 
lastly we order by the actual argument weights. - - :internal: - """ - return ( - bool(self.arguments), - -len(self._static_weights), - self._static_weights, - -len(self._argument_weights), - self._argument_weights, - ) - - def build_compare_key(self) -> t.Tuple[int, int, int]: - """The build compare key for sorting. - - :internal: - """ - return (1 if self.alias else 0, -len(self.arguments), -len(self.defaults or ())) - - def __eq__(self, other: object) -> bool: - return isinstance(other, type(self)) and self._trace == other._trace - - __hash__ = None # type: ignore - - def __str__(self) -> str: - return self.rule - - def __repr__(self) -> str: - if self.map is None: - return f"<{type(self).__name__} (unbound)>" - parts = [] - for is_dynamic, data in self._trace: - if is_dynamic: - parts.append(f"<{data}>") - else: - parts.append(data) - parts = "".join(parts).lstrip("|") - methods = f" ({', '.join(self.methods)})" if self.methods is not None else "" - return f"<{type(self).__name__} {parts!r}{methods} -> {self.endpoint}>" - - -class BaseConverter: - """Base class for all converters.""" - - regex = "[^/]+" - weight = 100 - - def __init__(self, map: "Map", *args: t.Any, **kwargs: t.Any) -> None: - self.map = map - - def to_python(self, value: str) -> t.Any: - return value - - def to_url(self, value: t.Any) -> str: - if isinstance(value, (bytes, bytearray)): - return _fast_url_quote(value) - return _fast_url_quote(str(value).encode(self.map.charset)) - - -class UnicodeConverter(BaseConverter): - """This converter is the default converter and accepts any string but - only one path segment. Thus the string can not include a slash. - - This is the default validator. - - Example:: - - Rule('/pages/'), - Rule('/') - - :param map: the :class:`Map`. - :param minlength: the minimum length of the string. Must be greater - or equal 1. - :param maxlength: the maximum length of the string. - :param length: the exact length of the string. - """ - - def __init__( - self, - map: "Map", - minlength: int = 1, - maxlength: t.Optional[int] = None, - length: t.Optional[int] = None, - ) -> None: - super().__init__(map) - if length is not None: - length_regex = f"{{{int(length)}}}" - else: - if maxlength is None: - maxlength_value = "" - else: - maxlength_value = str(int(maxlength)) - length_regex = f"{{{int(minlength)},{maxlength_value}}}" - self.regex = f"[^/]{length_regex}" - - -class AnyConverter(BaseConverter): - """Matches one of the items provided. Items can either be Python - identifiers or strings:: - - Rule('/') - - :param map: the :class:`Map`. - :param items: this function accepts the possible items as positional - arguments. - """ - - def __init__(self, map: "Map", *items: str) -> None: - super().__init__(map) - self.regex = f"(?:{'|'.join([re.escape(x) for x in items])})" - - -class PathConverter(BaseConverter): - """Like the default :class:`UnicodeConverter`, but it also matches - slashes. This is useful for wikis and similar applications:: - - Rule('/') - Rule('//edit') - - :param map: the :class:`Map`. - """ - - regex = "[^/].*?" - weight = 200 - - -class NumberConverter(BaseConverter): - """Baseclass for `IntegerConverter` and `FloatConverter`. 
- - :internal: - """ - - weight = 50 - num_convert: t.Callable = int - - def __init__( - self, - map: "Map", - fixed_digits: int = 0, - min: t.Optional[int] = None, - max: t.Optional[int] = None, - signed: bool = False, - ) -> None: - if signed: - self.regex = self.signed_regex - super().__init__(map) - self.fixed_digits = fixed_digits - self.min = min - self.max = max - self.signed = signed - - def to_python(self, value: str) -> t.Any: - if self.fixed_digits and len(value) != self.fixed_digits: - raise ValidationError() - value = self.num_convert(value) - if (self.min is not None and value < self.min) or ( - self.max is not None and value > self.max - ): - raise ValidationError() - return value - - def to_url(self, value: t.Any) -> str: - value = str(self.num_convert(value)) - if self.fixed_digits: - value = value.zfill(self.fixed_digits) - return value - - @property - def signed_regex(self) -> str: - return f"-?{self.regex}" - - -class IntegerConverter(NumberConverter): - """This converter only accepts integer values:: - - Rule("/page/") - - By default it only accepts unsigned, positive values. The ``signed`` - parameter will enable signed, negative values. :: - - Rule("/page/") - - :param map: The :class:`Map`. - :param fixed_digits: The number of fixed digits in the URL. If you - set this to ``4`` for example, the rule will only match if the - URL looks like ``/0001/``. The default is variable length. - :param min: The minimal value. - :param max: The maximal value. - :param signed: Allow signed (negative) values. - - .. versionadded:: 0.15 - The ``signed`` parameter. - """ - - regex = r"\d+" - - -class FloatConverter(NumberConverter): - """This converter only accepts floating point values:: - - Rule("/probability/") - - By default it only accepts unsigned, positive values. The ``signed`` - parameter will enable signed, negative values. :: - - Rule("/offset/") - - :param map: The :class:`Map`. - :param min: The minimal value. - :param max: The maximal value. - :param signed: Allow signed (negative) values. - - .. versionadded:: 0.15 - The ``signed`` parameter. - """ - - regex = r"\d+\.\d+" - num_convert = float - - def __init__( - self, - map: "Map", - min: t.Optional[float] = None, - max: t.Optional[float] = None, - signed: bool = False, - ) -> None: - super().__init__(map, min=min, max=max, signed=signed) # type: ignore - - -class UUIDConverter(BaseConverter): - """This converter only accepts UUID strings:: - - Rule('/object/') - - .. versionadded:: 0.10 - - :param map: the :class:`Map`. - """ - - regex = ( - r"[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-" - r"[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}" - ) - - def to_python(self, value: str) -> uuid.UUID: - return uuid.UUID(value) - - def to_url(self, value: uuid.UUID) -> str: - return str(value) - - -#: the default converter mapping for the map. -DEFAULT_CONVERTERS: t.Mapping[str, t.Type[BaseConverter]] = { - "default": UnicodeConverter, - "string": UnicodeConverter, - "any": AnyConverter, - "path": PathConverter, - "int": IntegerConverter, - "float": FloatConverter, - "uuid": UUIDConverter, -} - - -class Map: - """The map class stores all the URL rules and some configuration - parameters. Some of the configuration values are only stored on the - `Map` instance since those affect all rules, others are just defaults - and can be overridden for each rule. Note that you have to specify all - arguments besides the `rules` as keyword arguments! - - :param rules: sequence of url rules for this map. 
- :param default_subdomain: The default subdomain for rules without a - subdomain defined. - :param charset: charset of the url. defaults to ``"utf-8"`` - :param strict_slashes: If a rule ends with a slash but the matched - URL does not, redirect to the URL with a trailing slash. - :param merge_slashes: Merge consecutive slashes when matching or - building URLs. Matches will redirect to the normalized URL. - Slashes in variable parts are not merged. - :param redirect_defaults: This will redirect to the default rule if it - wasn't visited that way. This helps creating - unique URLs. - :param converters: A dict of converters that adds additional converters - to the list of converters. If you redefine one - converter this will override the original one. - :param sort_parameters: If set to `True` the url parameters are sorted. - See `url_encode` for more details. - :param sort_key: The sort key function for `url_encode`. - :param encoding_errors: the error method to use for decoding - :param host_matching: if set to `True` it enables the host matching - feature and disables the subdomain one. If - enabled the `host` parameter to rules is used - instead of the `subdomain` one. - - .. versionchanged:: 1.0 - If ``url_scheme`` is ``ws`` or ``wss``, only WebSocket rules - will match. - - .. versionchanged:: 1.0 - Added ``merge_slashes``. - - .. versionchanged:: 0.7 - Added ``encoding_errors`` and ``host_matching``. - - .. versionchanged:: 0.5 - Added ``sort_parameters`` and ``sort_key``. - """ - - #: A dict of default converters to be used. - default_converters = ImmutableDict(DEFAULT_CONVERTERS) - - #: The type of lock to use when updating. - #: - #: .. versionadded:: 1.0 - lock_class = Lock - - def __init__( - self, - rules: t.Optional[t.Iterable[RuleFactory]] = None, - default_subdomain: str = "", - charset: str = "utf-8", - strict_slashes: bool = True, - merge_slashes: bool = True, - redirect_defaults: bool = True, - converters: t.Optional[t.Mapping[str, t.Type[BaseConverter]]] = None, - sort_parameters: bool = False, - sort_key: t.Optional[t.Callable[[t.Any], t.Any]] = None, - encoding_errors: str = "replace", - host_matching: bool = False, - ) -> None: - self._rules: t.List[Rule] = [] - self._rules_by_endpoint: t.Dict[str, t.List[Rule]] = {} - self._remap = True - self._remap_lock = self.lock_class() - - self.default_subdomain = default_subdomain - self.charset = charset - self.encoding_errors = encoding_errors - self.strict_slashes = strict_slashes - self.merge_slashes = merge_slashes - self.redirect_defaults = redirect_defaults - self.host_matching = host_matching - - self.converters = self.default_converters.copy() - if converters: - self.converters.update(converters) - - self.sort_parameters = sort_parameters - self.sort_key = sort_key - - for rulefactory in rules or (): - self.add(rulefactory) - - def is_endpoint_expecting(self, endpoint: str, *arguments: str) -> bool: - """Iterate over all rules and check if the endpoint expects - the arguments provided. This is for example useful if you have - some URLs that expect a language code and others that do not and - you want to wrap the builder a bit so that the current language - code is automatically added if not provided but endpoints expect - it. - - :param endpoint: the endpoint to check. - :param arguments: this function accepts one or more arguments - as positional arguments. Each one of them is - checked. 
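[Editor's sketch, not part of the patch] The `converters` parameter described above lets a Map register custom BaseConverter subclasses. A minimal sketch under the same werkzeug 2.0.2 assumptions (the `slug` converter name and its regex are illustrative):

    from werkzeug.routing import BaseConverter, Map, Rule

    class SlugConverter(BaseConverter):
        # lowercase words separated by single dashes, e.g. "hello-world"
        regex = r"[a-z0-9]+(?:-[a-z0-9]+)*"

    url_map = Map(
        [Rule("/post/<slug:title>", endpoint="post")],
        converters={"slug": SlugConverter},
    )

    urls = url_map.bind("example.com")
    assert urls.match("/post/hello-world") == ("post", {"title": "hello-world"})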
- """ - self.update() - arguments = set(arguments) - for rule in self._rules_by_endpoint[endpoint]: - if arguments.issubset(rule.arguments): - return True - return False - - def iter_rules(self, endpoint: t.Optional[str] = None) -> t.Iterator[Rule]: - """Iterate over all rules or the rules of an endpoint. - - :param endpoint: if provided only the rules for that endpoint - are returned. - :return: an iterator - """ - self.update() - if endpoint is not None: - return iter(self._rules_by_endpoint[endpoint]) - return iter(self._rules) - - def add(self, rulefactory: RuleFactory) -> None: - """Add a new rule or factory to the map and bind it. Requires that the - rule is not bound to another map. - - :param rulefactory: a :class:`Rule` or :class:`RuleFactory` - """ - for rule in rulefactory.get_rules(self): - rule.bind(self) - self._rules.append(rule) - self._rules_by_endpoint.setdefault(rule.endpoint, []).append(rule) - self._remap = True - - def bind( - self, - server_name: str, - script_name: t.Optional[str] = None, - subdomain: t.Optional[str] = None, - url_scheme: str = "http", - default_method: str = "GET", - path_info: t.Optional[str] = None, - query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, - ) -> "MapAdapter": - """Return a new :class:`MapAdapter` with the details specified to the - call. Note that `script_name` will default to ``'/'`` if not further - specified or `None`. The `server_name` at least is a requirement - because the HTTP RFC requires absolute URLs for redirects and so all - redirect exceptions raised by Werkzeug will contain the full canonical - URL. - - If no path_info is passed to :meth:`match` it will use the default path - info passed to bind. While this doesn't really make sense for - manual bind calls, it's useful if you bind a map to a WSGI - environment which already contains the path info. - - `subdomain` will default to the `default_subdomain` for this map if - no defined. If there is no `default_subdomain` you cannot use the - subdomain feature. - - .. versionchanged:: 1.0 - If ``url_scheme`` is ``ws`` or ``wss``, only WebSocket rules - will match. - - .. versionchanged:: 0.15 - ``path_info`` defaults to ``'/'`` if ``None``. - - .. versionchanged:: 0.8 - ``query_args`` can be a string. - - .. versionchanged:: 0.7 - Added ``query_args``. - """ - server_name = server_name.lower() - if self.host_matching: - if subdomain is not None: - raise RuntimeError("host matching enabled and a subdomain was provided") - elif subdomain is None: - subdomain = self.default_subdomain - if script_name is None: - script_name = "/" - if path_info is None: - path_info = "/" - - try: - server_name = _encode_idna(server_name) # type: ignore - except UnicodeError as e: - raise BadHost() from e - - return MapAdapter( - self, - server_name, - script_name, - subdomain, - url_scheme, - path_info, - default_method, - query_args, - ) - - def bind_to_environ( - self, - environ: "WSGIEnvironment", - server_name: t.Optional[str] = None, - subdomain: t.Optional[str] = None, - ) -> "MapAdapter": - """Like :meth:`bind` but you can pass it an WSGI environment and it - will fetch the information from that dictionary. Note that because of - limitations in the protocol there is no way to get the current - subdomain and real `server_name` from the environment. If you don't - provide it, Werkzeug will use `SERVER_NAME` and `SERVER_PORT` (or - `HTTP_HOST` if provided) as used `server_name` with disabled subdomain - feature. 
- - If `subdomain` is `None` but an environment and a server name is - provided it will calculate the current subdomain automatically. - Example: `server_name` is ``'example.com'`` and the `SERVER_NAME` - in the wsgi `environ` is ``'staging.dev.example.com'`` the calculated - subdomain will be ``'staging.dev'``. - - If the object passed as environ has an environ attribute, the value of - this attribute is used instead. This allows you to pass request - objects. Additionally `PATH_INFO` added as a default of the - :class:`MapAdapter` so that you don't have to pass the path info to - the match method. - - .. versionchanged:: 1.0.0 - If the passed server name specifies port 443, it will match - if the incoming scheme is ``https`` without a port. - - .. versionchanged:: 1.0.0 - A warning is shown when the passed server name does not - match the incoming WSGI server name. - - .. versionchanged:: 0.8 - This will no longer raise a ValueError when an unexpected server - name was passed. - - .. versionchanged:: 0.5 - previously this method accepted a bogus `calculate_subdomain` - parameter that did not have any effect. It was removed because - of that. - - :param environ: a WSGI environment. - :param server_name: an optional server name hint (see above). - :param subdomain: optionally the current subdomain (see above). - """ - environ = _get_environ(environ) - wsgi_server_name = get_host(environ).lower() - scheme = environ["wsgi.url_scheme"] - upgrade = any( - v.strip() == "upgrade" - for v in environ.get("HTTP_CONNECTION", "").lower().split(",") - ) - - if upgrade and environ.get("HTTP_UPGRADE", "").lower() == "websocket": - scheme = "wss" if scheme == "https" else "ws" - - if server_name is None: - server_name = wsgi_server_name - else: - server_name = server_name.lower() - - # strip standard port to match get_host() - if scheme in {"http", "ws"} and server_name.endswith(":80"): - server_name = server_name[:-3] - elif scheme in {"https", "wss"} and server_name.endswith(":443"): - server_name = server_name[:-4] - - if subdomain is None and not self.host_matching: - cur_server_name = wsgi_server_name.split(".") - real_server_name = server_name.split(".") - offset = -len(real_server_name) - - if cur_server_name[offset:] != real_server_name: - # This can happen even with valid configs if the server was - # accessed directly by IP address under some situations. - # Instead of raising an exception like in Werkzeug 0.7 or - # earlier we go by an invalid subdomain which will result - # in a 404 error on matching. - warnings.warn( - f"Current server name {wsgi_server_name!r} doesn't match configured" - f" server name {server_name!r}", - stacklevel=2, - ) - subdomain = "" - else: - subdomain = ".".join(filter(None, cur_server_name[:offset])) - - def _get_wsgi_string(name: str) -> t.Optional[str]: - val = environ.get(name) - if val is not None: - return _wsgi_decoding_dance(val, self.charset) - return None - - script_name = _get_wsgi_string("SCRIPT_NAME") - path_info = _get_wsgi_string("PATH_INFO") - query_args = _get_wsgi_string("QUERY_STRING") - return Map.bind( - self, - server_name, - script_name, - subdomain, - scheme, - environ["REQUEST_METHOD"], - path_info, - query_args=query_args, - ) - - def update(self) -> None: - """Called before matching and building to keep the compiled rules - in the correct order after things changed. 
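[Editor's sketch, not part of the patch] The subdomain calculation that `bind_to_environ` performs above can be exercised directly. This assumes werkzeug 2.0.2 and uses `werkzeug.test.create_environ`, which is outside the file shown here, to fabricate a WSGI environ; the `<lang>` subdomain rule is illustrative:

    from werkzeug.routing import Map, Rule
    from werkzeug.test import create_environ

    url_map = Map([Rule("/", endpoint="index", subdomain="<lang>")])

    # Host is en.example.com, the configured server name is example.com,
    # so the adapter derives the subdomain "en" as described above.
    environ = create_environ("/", base_url="http://en.example.com/")
    urls = url_map.bind_to_environ(environ, server_name="example.com")
    assert urls.match("/") == ("index", {"lang": "en"})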
- """ - if not self._remap: - return - - with self._remap_lock: - if not self._remap: - return - - self._rules.sort(key=lambda x: x.match_compare_key()) - for rules in self._rules_by_endpoint.values(): - rules.sort(key=lambda x: x.build_compare_key()) - self._remap = False - - def __repr__(self) -> str: - rules = self.iter_rules() - return f"{type(self).__name__}({pformat(list(rules))})" - - -class MapAdapter: - - """Returned by :meth:`Map.bind` or :meth:`Map.bind_to_environ` and does - the URL matching and building based on runtime information. - """ - - def __init__( - self, - map: Map, - server_name: str, - script_name: str, - subdomain: t.Optional[str], - url_scheme: str, - path_info: str, - default_method: str, - query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, - ): - self.map = map - self.server_name = _to_str(server_name) - script_name = _to_str(script_name) - if not script_name.endswith("/"): - script_name += "/" - self.script_name = script_name - self.subdomain = _to_str(subdomain) - self.url_scheme = _to_str(url_scheme) - self.path_info = _to_str(path_info) - self.default_method = _to_str(default_method) - self.query_args = query_args - self.websocket = self.url_scheme in {"ws", "wss"} - - def dispatch( - self, - view_func: t.Callable[[str, t.Mapping[str, t.Any]], "WSGIApplication"], - path_info: t.Optional[str] = None, - method: t.Optional[str] = None, - catch_http_exceptions: bool = False, - ) -> "WSGIApplication": - """Does the complete dispatching process. `view_func` is called with - the endpoint and a dict with the values for the view. It should - look up the view function, call it, and return a response object - or WSGI application. http exceptions are not caught by default - so that applications can display nicer error messages by just - catching them by hand. If you want to stick with the default - error messages you can pass it ``catch_http_exceptions=True`` and - it will catch the http exceptions. - - Here a small example for the dispatch usage:: - - from werkzeug.wrappers import Request, Response - from werkzeug.wsgi import responder - from werkzeug.routing import Map, Rule - - def on_index(request): - return Response('Hello from the index') - - url_map = Map([Rule('/', endpoint='index')]) - views = {'index': on_index} - - @responder - def application(environ, start_response): - request = Request(environ) - urls = url_map.bind_to_environ(environ) - return urls.dispatch(lambda e, v: views[e](request, **v), - catch_http_exceptions=True) - - Keep in mind that this method might return exception objects, too, so - use :class:`Response.force_type` to get a response object. - - :param view_func: a function that is called with the endpoint as - first argument and the value dict as second. Has - to dispatch to the actual view function with this - information. (see above) - :param path_info: the path info to use for matching. Overrides the - path info specified on binding. - :param method: the HTTP method used for matching. Overrides the - method specified on binding. - :param catch_http_exceptions: set to `True` to catch any of the - werkzeug :class:`HTTPException`\\s. 
- """ - try: - try: - endpoint, args = self.match(path_info, method) - except RequestRedirect as e: - return e - return view_func(endpoint, args) - except HTTPException as e: - if catch_http_exceptions: - return e - raise - - @typing.overload - def match( # type: ignore - self, - path_info: t.Optional[str] = None, - method: t.Optional[str] = None, - return_rule: "te.Literal[False]" = False, - query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, - websocket: t.Optional[bool] = None, - ) -> t.Tuple[str, t.Mapping[str, t.Any]]: - ... - - @typing.overload - def match( - self, - path_info: t.Optional[str] = None, - method: t.Optional[str] = None, - return_rule: "te.Literal[True]" = True, - query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, - websocket: t.Optional[bool] = None, - ) -> t.Tuple[Rule, t.Mapping[str, t.Any]]: - ... - - def match( - self, - path_info: t.Optional[str] = None, - method: t.Optional[str] = None, - return_rule: bool = False, - query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, - websocket: t.Optional[bool] = None, - ) -> t.Tuple[t.Union[str, Rule], t.Mapping[str, t.Any]]: - """The usage is simple: you just pass the match method the current - path info as well as the method (which defaults to `GET`). The - following things can then happen: - - - you receive a `NotFound` exception that indicates that no URL is - matching. A `NotFound` exception is also a WSGI application you - can call to get a default page not found page (happens to be the - same object as `werkzeug.exceptions.NotFound`) - - - you receive a `MethodNotAllowed` exception that indicates that there - is a match for this URL but not for the current request method. - This is useful for RESTful applications. - - - you receive a `RequestRedirect` exception with a `new_url` - attribute. This exception is used to notify you about a request - Werkzeug requests from your WSGI application. This is for example the - case if you request ``/foo`` although the correct URL is ``/foo/`` - You can use the `RequestRedirect` instance as response-like object - similar to all other subclasses of `HTTPException`. - - - you receive a ``WebsocketMismatch`` exception if the only - match is a WebSocket rule but the bind is an HTTP request, or - if the match is an HTTP rule but the bind is a WebSocket - request. - - - you get a tuple in the form ``(endpoint, arguments)`` if there is - a match (unless `return_rule` is True, in which case you get a tuple - in the form ``(rule, arguments)``) - - If the path info is not passed to the match method the default path - info of the map is used (defaults to the root URL if not defined - explicitly). - - All of the exceptions raised are subclasses of `HTTPException` so they - can be used as WSGI responses. They will all render generic error or - redirect pages. - - Here is a small example for matching: - - >>> m = Map([ - ... Rule('/', endpoint='index'), - ... Rule('/downloads/', endpoint='downloads/index'), - ... Rule('/downloads/', endpoint='downloads/show') - ... ]) - >>> urls = m.bind("example.com", "/") - >>> urls.match("/", "GET") - ('index', {}) - >>> urls.match("/downloads/42") - ('downloads/show', {'id': 42}) - - And here is what happens on redirect and missing URLs: - - >>> urls.match("/downloads") - Traceback (most recent call last): - ... - RequestRedirect: http://example.com/downloads/ - >>> urls.match("/missing") - Traceback (most recent call last): - ... 
- NotFound: 404 Not Found - - :param path_info: the path info to use for matching. Overrides the - path info specified on binding. - :param method: the HTTP method used for matching. Overrides the - method specified on binding. - :param return_rule: return the rule that matched instead of just the - endpoint (defaults to `False`). - :param query_args: optional query arguments that are used for - automatic redirects as string or dictionary. It's - currently not possible to use the query arguments - for URL matching. - :param websocket: Match WebSocket instead of HTTP requests. A - websocket request has a ``ws`` or ``wss`` - :attr:`url_scheme`. This overrides that detection. - - .. versionadded:: 1.0 - Added ``websocket``. - - .. versionchanged:: 0.8 - ``query_args`` can be a string. - - .. versionadded:: 0.7 - Added ``query_args``. - - .. versionadded:: 0.6 - Added ``return_rule``. - """ - self.map.update() - if path_info is None: - path_info = self.path_info - else: - path_info = _to_str(path_info, self.map.charset) - if query_args is None: - query_args = self.query_args or {} - method = (method or self.default_method).upper() - - if websocket is None: - websocket = self.websocket - - require_redirect = False - - domain_part = self.server_name if self.map.host_matching else self.subdomain - path_part = f"/{path_info.lstrip('/')}" if path_info else "" - path = f"{domain_part}|{path_part}" - - have_match_for = set() - websocket_mismatch = False - - for rule in self.map._rules: - try: - rv = rule.match(path, method) - except RequestPath as e: - raise RequestRedirect( - self.make_redirect_url( - url_quote(e.path_info, self.map.charset, safe="/:|+"), - query_args, - ) - ) from None - except RequestAliasRedirect as e: - raise RequestRedirect( - self.make_alias_redirect_url( - path, rule.endpoint, e.matched_values, method, query_args - ) - ) from None - if rv is None: - continue - if rule.methods is not None and method not in rule.methods: - have_match_for.update(rule.methods) - continue - - if rule.websocket != websocket: - websocket_mismatch = True - continue - - if self.map.redirect_defaults: - redirect_url = self.get_default_redirect(rule, method, rv, query_args) - if redirect_url is not None: - raise RequestRedirect(redirect_url) - - if rule.redirect_to is not None: - if isinstance(rule.redirect_to, str): - - def _handle_match(match: t.Match[str]) -> str: - value = rv[match.group(1)] # type: ignore - return rule._converters[match.group(1)].to_url(value) - - redirect_url = _simple_rule_re.sub(_handle_match, rule.redirect_to) - else: - redirect_url = rule.redirect_to(self, **rv) - - if self.subdomain: - netloc = f"{self.subdomain}.{self.server_name}" - else: - netloc = self.server_name - - raise RequestRedirect( - url_join( - f"{self.url_scheme or 'http'}://{netloc}{self.script_name}", - redirect_url, - ) - ) - - if require_redirect: - raise RequestRedirect( - self.make_redirect_url( - url_quote(path_info, self.map.charset, safe="/:|+"), query_args - ) - ) - - if return_rule: - return rule, rv - else: - return rule.endpoint, rv - - if have_match_for: - raise MethodNotAllowed(valid_methods=list(have_match_for)) - - if websocket_mismatch: - raise WebsocketMismatch() - - raise NotFound() - - def test( - self, path_info: t.Optional[str] = None, method: t.Optional[str] = None - ) -> bool: - """Test if a rule would match. Works like `match` but returns `True` - if the URL matches, or `False` if it does not exist. - - :param path_info: the path info to use for matching. 
Overrides the - path info specified on binding. - :param method: the HTTP method used for matching. Overrides the - method specified on binding. - """ - try: - self.match(path_info, method) - except RequestRedirect: - pass - except HTTPException: - return False - return True - - def allowed_methods(self, path_info: t.Optional[str] = None) -> t.Iterable[str]: - """Returns the valid methods that match for a given path. - - .. versionadded:: 0.7 - """ - try: - self.match(path_info, method="--") - except MethodNotAllowed as e: - return e.valid_methods # type: ignore - except HTTPException: - pass - return [] - - def get_host(self, domain_part: t.Optional[str]) -> str: - """Figures out the full host name for the given domain part. The - domain part is a subdomain in case host matching is disabled or - a full host name. - """ - if self.map.host_matching: - if domain_part is None: - return self.server_name - return _to_str(domain_part, "ascii") - subdomain = domain_part - if subdomain is None: - subdomain = self.subdomain - else: - subdomain = _to_str(subdomain, "ascii") - - if subdomain: - return f"{subdomain}.{self.server_name}" - else: - return self.server_name - - def get_default_redirect( - self, - rule: Rule, - method: str, - values: t.MutableMapping[str, t.Any], - query_args: t.Union[t.Mapping[str, t.Any], str], - ) -> t.Optional[str]: - """A helper that returns the URL to redirect to if it finds one. - This is used for default redirecting only. - - :internal: - """ - assert self.map.redirect_defaults - for r in self.map._rules_by_endpoint[rule.endpoint]: - # every rule that comes after this one, including ourself - # has a lower priority for the defaults. We order the ones - # with the highest priority up for building. - if r is rule: - break - if r.provides_defaults_for(rule) and r.suitable_for(values, method): - values.update(r.defaults) # type: ignore - domain_part, path = r.build(values) # type: ignore - return self.make_redirect_url(path, query_args, domain_part=domain_part) - return None - - def encode_query_args(self, query_args: t.Union[t.Mapping[str, t.Any], str]) -> str: - if not isinstance(query_args, str): - return url_encode(query_args, self.map.charset) - return query_args - - def make_redirect_url( - self, - path_info: str, - query_args: t.Optional[t.Union[t.Mapping[str, t.Any], str]] = None, - domain_part: t.Optional[str] = None, - ) -> str: - """Creates a redirect URL. - - :internal: - """ - if query_args: - suffix = f"?{self.encode_query_args(query_args)}" - else: - suffix = "" - - scheme = self.url_scheme or "http" - host = self.get_host(domain_part) - path = posixpath.join(self.script_name.strip("/"), path_info.lstrip("/")) - return f"{scheme}://{host}/{path}{suffix}" - - def make_alias_redirect_url( - self, - path: str, - endpoint: str, - values: t.Mapping[str, t.Any], - method: str, - query_args: t.Union[t.Mapping[str, t.Any], str], - ) -> str: - """Internally called to make an alias redirect URL.""" - url = self.build( - endpoint, values, method, append_unknown=False, force_external=True - ) - if query_args: - url += f"?{self.encode_query_args(query_args)}" - assert url != path, "detected invalid alias setting. No canonical URL found" - return url - - def _partial_build( - self, - endpoint: str, - values: t.Mapping[str, t.Any], - method: t.Optional[str], - append_unknown: bool, - ) -> t.Optional[t.Tuple[str, str, bool]]: - """Helper for :meth:`build`. Returns subdomain and path for the - rule that accepts this endpoint, values and method. 
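[Editor's sketch, not part of the patch] A small illustration of the `test` and `allowed_methods` helpers deleted above, under the same werkzeug 2.0.2 assumptions (the `/login` rule is illustrative):

    from werkzeug.routing import Map, Rule

    url_map = Map([Rule("/login", endpoint="login", methods=["GET", "POST"])])
    urls = url_map.bind("example.com")

    # test() reports whether a path would match at all, while
    # allowed_methods() lists the methods that would be accepted for it
    # (HEAD is added automatically because GET is allowed).
    assert urls.test("/login")
    assert not urls.test("/missing")
    assert sorted(urls.allowed_methods("/login")) == ["GET", "HEAD", "POST"]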
- - :internal: - """ - # in case the method is none, try with the default method first - if method is None: - rv = self._partial_build( - endpoint, values, self.default_method, append_unknown - ) - if rv is not None: - return rv - - # Default method did not match or a specific method is passed. - # Check all for first match with matching host. If no matching - # host is found, go with first result. - first_match = None - - for rule in self.map._rules_by_endpoint.get(endpoint, ()): - if rule.suitable_for(values, method): - build_rv = rule.build(values, append_unknown) - - if build_rv is not None: - rv = (build_rv[0], build_rv[1], rule.websocket) - if self.map.host_matching: - if rv[0] == self.server_name: - return rv - elif first_match is None: - first_match = rv - else: - return rv - - return first_match - - def build( - self, - endpoint: str, - values: t.Optional[t.Mapping[str, t.Any]] = None, - method: t.Optional[str] = None, - force_external: bool = False, - append_unknown: bool = True, - url_scheme: t.Optional[str] = None, - ) -> str: - """Building URLs works pretty much the other way round. Instead of - `match` you call `build` and pass it the endpoint and a dict of - arguments for the placeholders. - - The `build` function also accepts an argument called `force_external` - which, if you set it to `True` will force external URLs. Per default - external URLs (include the server name) will only be used if the - target URL is on a different subdomain. - - >>> m = Map([ - ... Rule('/', endpoint='index'), - ... Rule('/downloads/', endpoint='downloads/index'), - ... Rule('/downloads/', endpoint='downloads/show') - ... ]) - >>> urls = m.bind("example.com", "/") - >>> urls.build("index", {}) - '/' - >>> urls.build("downloads/show", {'id': 42}) - '/downloads/42' - >>> urls.build("downloads/show", {'id': 42}, force_external=True) - 'http://example.com/downloads/42' - - Because URLs cannot contain non ASCII data you will always get - bytes back. Non ASCII characters are urlencoded with the - charset defined on the map instance. - - Additional values are converted to strings and appended to the URL as - URL querystring parameters: - - >>> urls.build("index", {'q': 'My Searchstring'}) - '/?q=My+Searchstring' - - When processing those additional values, lists are furthermore - interpreted as multiple values (as per - :py:class:`werkzeug.datastructures.MultiDict`): - - >>> urls.build("index", {'q': ['a', 'b', 'c']}) - '/?q=a&q=b&q=c' - - Passing a ``MultiDict`` will also add multiple values: - - >>> urls.build("index", MultiDict((('p', 'z'), ('q', 'a'), ('q', 'b')))) - '/?p=z&q=a&q=b' - - If a rule does not exist when building a `BuildError` exception is - raised. - - The build method accepts an argument called `method` which allows you - to specify the method you want to have an URL built for if you have - different methods for the same endpoint specified. - - :param endpoint: the endpoint of the URL to build. - :param values: the values for the URL to build. Unhandled values are - appended to the URL as query parameters. - :param method: the HTTP method for the rule if there are different - URLs for different methods on the same endpoint. - :param force_external: enforce full canonical external URLs. If the URL - scheme is not provided, this will generate - a protocol-relative URL. - :param append_unknown: unknown parameters are appended to the generated - URL as query string argument. Disable this - if you want the builder to ignore those. 
- :param url_scheme: Scheme to use in place of the bound - :attr:`url_scheme`. - - .. versionchanged:: 2.0 - Added the ``url_scheme`` parameter. - - .. versionadded:: 0.6 - Added the ``append_unknown`` parameter. - """ - self.map.update() - - if values: - temp_values: t.Dict[str, t.Union[t.List[t.Any], t.Any]] = {} - always_list = isinstance(values, MultiDict) - key: str - value: t.Optional[t.Union[t.List[t.Any], t.Any]] - - # For MultiDict, dict.items(values) is like values.lists() - # without the call or list coercion overhead. - for key, value in dict.items(values): # type: ignore - if value is None: - continue - - if always_list or isinstance(value, (list, tuple)): - value = [v for v in value if v is not None] - - if not value: - continue - - if len(value) == 1: - value = value[0] - - temp_values[key] = value - - values = temp_values - else: - values = {} - - rv = self._partial_build(endpoint, values, method, append_unknown) - if rv is None: - raise BuildError(endpoint, values, method, self) - - domain_part, path, websocket = rv - host = self.get_host(domain_part) - - if url_scheme is None: - url_scheme = self.url_scheme - - # Always build WebSocket routes with the scheme (browsers - # require full URLs). If bound to a WebSocket, ensure that HTTP - # routes are built with an HTTP scheme. - secure = url_scheme in {"https", "wss"} - - if websocket: - force_external = True - url_scheme = "wss" if secure else "ws" - elif url_scheme: - url_scheme = "https" if secure else "http" - - # shortcut this. - if not force_external and ( - (self.map.host_matching and host == self.server_name) - or (not self.map.host_matching and domain_part == self.subdomain) - ): - return f"{self.script_name.rstrip('/')}/{path.lstrip('/')}" - - scheme = f"{url_scheme}:" if url_scheme else "" - return f"{scheme}//{host}{self.script_name[:-1]}/{path.lstrip('/')}" diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/__init__.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/multipart.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/multipart.py deleted file mode 100644 index bb8ab34..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/sansio/multipart.py +++ /dev/null @@ -1,260 +0,0 @@ -import re -from dataclasses import dataclass -from enum import auto -from enum import Enum -from typing import cast -from typing import List -from typing import Optional -from typing import Tuple - -from .._internal import _to_bytes -from .._internal import _to_str -from ..datastructures import Headers -from ..exceptions import RequestEntityTooLarge -from ..http import parse_options_header - - -class Event: - pass - - -@dataclass(frozen=True) -class Preamble(Event): - data: bytes - - -@dataclass(frozen=True) -class Field(Event): - name: str - headers: Headers - - -@dataclass(frozen=True) -class File(Event): - name: str - filename: str - headers: Headers - - -@dataclass(frozen=True) -class Data(Event): - data: bytes - more_data: bool - - -@dataclass(frozen=True) -class Epilogue(Event): - data: bytes - - -class NeedData(Event): - pass - - -NEED_DATA = NeedData() - - -class State(Enum): - PREAMBLE = auto() - PART = auto() - DATA = auto() - EPILOGUE = auto() - COMPLETE = auto() - - -# Multipart line breaks MUST be CRLF (\r\n) by RFC-7578, except that -# many implementations break this and either use CR or LF alone. 
-LINE_BREAK = b"(?:\r\n|\n|\r)" -BLANK_LINE_RE = re.compile(b"(?:\r\n\r\n|\r\r|\n\n)", re.MULTILINE) -LINE_BREAK_RE = re.compile(LINE_BREAK, re.MULTILINE) -# Header values can be continued via a space or tab after the linebreak, as -# per RFC2231 -HEADER_CONTINUATION_RE = re.compile(b"%s[ \t]" % LINE_BREAK, re.MULTILINE) - - -class MultipartDecoder: - """Decodes a multipart message as bytes into Python events. - - The part data is returned as available to allow the caller to save - the data from memory to disk, if desired. - """ - - def __init__( - self, - boundary: bytes, - max_form_memory_size: Optional[int] = None, - ) -> None: - self.buffer = bytearray() - self.complete = False - self.max_form_memory_size = max_form_memory_size - self.state = State.PREAMBLE - self.boundary = boundary - - # Note in the below \h i.e. horizontal whitespace is used - # as [^\S\n\r] as \h isn't supported in python. - - # The preamble must end with a boundary where the boundary is - # prefixed by a line break, RFC2046. Except that many - # implementations including Werkzeug's tests omit the line - # break prefix. In addition the first boundary could be the - # epilogue boundary (for empty form-data) hence the matching - # group to understand if it is an epilogue boundary. - self.preamble_re = re.compile( - br"%s?--%s(--[^\S\n\r]*%s?|[^\S\n\r]*%s)" - % (LINE_BREAK, re.escape(boundary), LINE_BREAK, LINE_BREAK), - re.MULTILINE, - ) - # A boundary must include a line break prefix and suffix, and - # may include trailing whitespace. In addition the boundary - # could be the epilogue boundary hence the matching group to - # understand if it is an epilogue boundary. - self.boundary_re = re.compile( - br"%s--%s(--[^\S\n\r]*%s?|[^\S\n\r]*%s)" - % (LINE_BREAK, re.escape(boundary), LINE_BREAK, LINE_BREAK), - re.MULTILINE, - ) - - def last_newline(self) -> int: - try: - last_nl = self.buffer.rindex(b"\n") - except ValueError: - last_nl = len(self.buffer) - try: - last_cr = self.buffer.rindex(b"\r") - except ValueError: - last_cr = len(self.buffer) - - return min(last_nl, last_cr) - - def receive_data(self, data: Optional[bytes]) -> None: - if data is None: - self.complete = True - elif ( - self.max_form_memory_size is not None - and len(self.buffer) + len(data) > self.max_form_memory_size - ): - raise RequestEntityTooLarge() - else: - self.buffer.extend(data) - - def next_event(self) -> Event: - event: Event = NEED_DATA - - if self.state == State.PREAMBLE: - match = self.preamble_re.search(self.buffer) - if match is not None: - if match.group(1).startswith(b"--"): - self.state = State.EPILOGUE - else: - self.state = State.PART - data = bytes(self.buffer[: match.start()]) - del self.buffer[: match.end()] - event = Preamble(data=data) - - elif self.state == State.PART: - match = BLANK_LINE_RE.search(self.buffer) - if match is not None: - headers = self._parse_headers(self.buffer[: match.start()]) - del self.buffer[: match.end()] - - if "content-disposition" not in headers: - raise ValueError("Missing Content-Disposition header") - - disposition, extra = parse_options_header( - headers["content-disposition"] - ) - name = cast(str, extra.get("name")) - filename = extra.get("filename") - if filename is not None: - event = File( - filename=filename, - headers=headers, - name=name, - ) - else: - event = Field( - headers=headers, - name=name, - ) - self.state = State.DATA - - elif self.state == State.DATA: - if self.buffer.find(b"--" + self.boundary) == -1: - # No complete boundary in the buffer, but there may be - # a partial 
boundary at the end. As the boundary - # starts with either a nl or cr find the earliest and - # return up to that as data. - data_length = del_index = self.last_newline() - more_data = True - else: - match = self.boundary_re.search(self.buffer) - if match is not None: - if match.group(1).startswith(b"--"): - self.state = State.EPILOGUE - else: - self.state = State.PART - data_length = match.start() - del_index = match.end() - else: - data_length = del_index = self.last_newline() - more_data = match is None - - data = bytes(self.buffer[:data_length]) - del self.buffer[:del_index] - if data or not more_data: - event = Data(data=data, more_data=more_data) - - elif self.state == State.EPILOGUE and self.complete: - event = Epilogue(data=bytes(self.buffer)) - del self.buffer[:] - self.state = State.COMPLETE - - if self.complete and isinstance(event, NeedData): - raise ValueError(f"Invalid form-data cannot parse beyond {self.state}") - - return event - - def _parse_headers(self, data: bytes) -> Headers: - headers: List[Tuple[str, str]] = [] - # Merge the continued headers into one line - data = HEADER_CONTINUATION_RE.sub(b" ", data) - # Now there is one header per line - for line in data.splitlines(): - if line.strip() != b"": - name, value = _to_str(line).strip().split(":", 1) - headers.append((name.strip(), value.strip())) - return Headers(headers) - - -class MultipartEncoder: - def __init__(self, boundary: bytes) -> None: - self.boundary = boundary - self.state = State.PREAMBLE - - def send_event(self, event: Event) -> bytes: - if isinstance(event, Preamble) and self.state == State.PREAMBLE: - self.state = State.PART - return event.data - elif isinstance(event, (Field, File)) and self.state in { - State.PREAMBLE, - State.PART, - State.DATA, - }: - self.state = State.DATA - data = b"\r\n--" + self.boundary + b"\r\n" - data += b'Content-Disposition: form-data; name="%s"' % _to_bytes(event.name) - if isinstance(event, File): - data += b'; filename="%s"' % _to_bytes(event.filename) - data += b"\r\n" - for name, value in cast(Field, event).headers: - if name.lower() != "content-disposition": - data += _to_bytes(f"{name}: {value}\r\n") - data += b"\r\n" - return data - elif isinstance(event, Data) and self.state == State.DATA: - return event.data - elif isinstance(event, Epilogue): - self.state = State.COMPLETE - return b"\r\n--" + self.boundary + b"--\r\n" + event.data - else: - raise ValueError(f"Cannot generate {event} in state: {self.state}") diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/request.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/request.py deleted file mode 100644 index 2c21a21..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/sansio/request.py +++ /dev/null @@ -1,548 +0,0 @@ -import typing as t -from datetime import datetime - -from .._internal import _to_str -from ..datastructures import Accept -from ..datastructures import Authorization -from ..datastructures import CharsetAccept -from ..datastructures import ETags -from ..datastructures import Headers -from ..datastructures import HeaderSet -from ..datastructures import IfRange -from ..datastructures import ImmutableList -from ..datastructures import ImmutableMultiDict -from ..datastructures import LanguageAccept -from ..datastructures import MIMEAccept -from ..datastructures import MultiDict -from ..datastructures import Range -from ..datastructures import RequestCacheControl -from ..http import parse_accept_header -from ..http import parse_authorization_header -from ..http import 
parse_cache_control_header -from ..http import parse_cookie -from ..http import parse_date -from ..http import parse_etags -from ..http import parse_if_range_header -from ..http import parse_list_header -from ..http import parse_options_header -from ..http import parse_range_header -from ..http import parse_set_header -from ..urls import url_decode -from ..user_agent import UserAgent -from ..useragents import _UserAgent as _DeprecatedUserAgent -from ..utils import cached_property -from ..utils import header_property -from .utils import get_current_url -from .utils import get_host - - -class Request: - """Represents the non-IO parts of a HTTP request, including the - method, URL info, and headers. - - This class is not meant for general use. It should only be used when - implementing WSGI, ASGI, or another HTTP application spec. Werkzeug - provides a WSGI implementation at :cls:`werkzeug.wrappers.Request`. - - :param method: The method the request was made with, such as - ``GET``. - :param scheme: The URL scheme of the protocol the request used, such - as ``https`` or ``wss``. - :param server: The address of the server. ``(host, port)``, - ``(path, None)`` for unix sockets, or ``None`` if not known. - :param root_path: The prefix that the application is mounted under. - This is prepended to generated URLs, but is not part of route - matching. - :param path: The path part of the URL after ``root_path``. - :param query_string: The part of the URL after the "?". - :param headers: The headers received with the request. - :param remote_addr: The address of the client sending the request. - - .. versionadded:: 2.0 - """ - - #: The charset used to decode most data in the request. - charset = "utf-8" - - #: the error handling procedure for errors, defaults to 'replace' - encoding_errors = "replace" - - #: the class to use for `args` and `form`. The default is an - #: :class:`~werkzeug.datastructures.ImmutableMultiDict` which supports - #: multiple values per key. alternatively it makes sense to use an - #: :class:`~werkzeug.datastructures.ImmutableOrderedMultiDict` which - #: preserves order or a :class:`~werkzeug.datastructures.ImmutableDict` - #: which is the fastest but only remembers the last key. It is also - #: possible to use mutable structures, but this is not recommended. - #: - #: .. versionadded:: 0.6 - parameter_storage_class: t.Type[MultiDict] = ImmutableMultiDict - - #: The type to be used for dict values from the incoming WSGI - #: environment. (For example for :attr:`cookies`.) By default an - #: :class:`~werkzeug.datastructures.ImmutableMultiDict` is used. - #: - #: .. versionchanged:: 1.0.0 - #: Changed to ``ImmutableMultiDict`` to support multiple values. - #: - #: .. versionadded:: 0.6 - dict_storage_class: t.Type[MultiDict] = ImmutableMultiDict - - #: the type to be used for list values from the incoming WSGI environment. - #: By default an :class:`~werkzeug.datastructures.ImmutableList` is used - #: (for example for :attr:`access_list`). - #: - #: .. versionadded:: 0.6 - list_storage_class: t.Type[t.List] = ImmutableList - - user_agent_class = _DeprecatedUserAgent - """The class used and returned by the :attr:`user_agent` property to - parse the header. Defaults to - :class:`~werkzeug.user_agent.UserAgent`, which does no parsing. An - extension can provide a subclass that uses a parser to provide other - data. - - .. versionadded:: 2.0 - """ - - #: Valid host names when handling requests. 
By default all hosts are - #: trusted, which means that whatever the client says the host is - #: will be accepted. - #: - #: Because ``Host`` and ``X-Forwarded-Host`` headers can be set to - #: any value by a malicious client, it is recommended to either set - #: this property or implement similar validation in the proxy (if - #: the application is being run behind one). - #: - #: .. versionadded:: 0.9 - trusted_hosts: t.Optional[t.List[str]] = None - - def __init__( - self, - method: str, - scheme: str, - server: t.Optional[t.Tuple[str, t.Optional[int]]], - root_path: str, - path: str, - query_string: bytes, - headers: Headers, - remote_addr: t.Optional[str], - ) -> None: - #: The method the request was made with, such as ``GET``. - self.method = method.upper() - #: The URL scheme of the protocol the request used, such as - #: ``https`` or ``wss``. - self.scheme = scheme - #: The address of the server. ``(host, port)``, ``(path, None)`` - #: for unix sockets, or ``None`` if not known. - self.server = server - #: The prefix that the application is mounted under, without a - #: trailing slash. :attr:`path` comes after this. - self.root_path = root_path.rstrip("/") - #: The path part of the URL after :attr:`root_path`. This is the - #: path used for routing within the application. - self.path = "/" + path.lstrip("/") - #: The part of the URL after the "?". This is the raw value, use - #: :attr:`args` for the parsed values. - self.query_string = query_string - #: The headers received with the request. - self.headers = headers - #: The address of the client sending the request. - self.remote_addr = remote_addr - - def __repr__(self) -> str: - try: - url = self.url - except Exception as e: - url = f"(invalid URL: {e})" - - return f"<{type(self).__name__} {url!r} [{self.method}]>" - - @property - def url_charset(self) -> str: - """The charset that is assumed for URLs. Defaults to the value - of :attr:`charset`. - - .. versionadded:: 0.6 - """ - return self.charset - - @cached_property - def args(self) -> "MultiDict[str, str]": - """The parsed URL parameters (the part in the URL after the question - mark). - - By default an - :class:`~werkzeug.datastructures.ImmutableMultiDict` - is returned from this function. This can be changed by setting - :attr:`parameter_storage_class` to a different type. This might - be necessary if the order of the form data is important. - """ - return url_decode( - self.query_string, - self.url_charset, - errors=self.encoding_errors, - cls=self.parameter_storage_class, - ) - - @cached_property - def access_route(self) -> t.List[str]: - """If a forwarded header exists this is a list of all ip addresses - from the client ip to the last proxy server. - """ - if "X-Forwarded-For" in self.headers: - return self.list_storage_class( - parse_list_header(self.headers["X-Forwarded-For"]) - ) - elif self.remote_addr is not None: - return self.list_storage_class([self.remote_addr]) - return self.list_storage_class() - - @cached_property - def full_path(self) -> str: - """Requested path, including the query string.""" - return f"{self.path}?{_to_str(self.query_string, self.url_charset)}" - - @property - def is_secure(self) -> bool: - """``True`` if the request was made with a secure protocol - (HTTPS or WSS). 
- """ - return self.scheme in {"https", "wss"} - - @cached_property - def url(self) -> str: - """The full request URL with the scheme, host, root path, path, - and query string.""" - return get_current_url( - self.scheme, self.host, self.root_path, self.path, self.query_string - ) - - @cached_property - def base_url(self) -> str: - """Like :attr:`url` but without the query string.""" - return get_current_url(self.scheme, self.host, self.root_path, self.path) - - @cached_property - def root_url(self) -> str: - """The request URL scheme, host, and root path. This is the root - that the application is accessed from. - """ - return get_current_url(self.scheme, self.host, self.root_path) - - @cached_property - def host_url(self) -> str: - """The request URL scheme and host only.""" - return get_current_url(self.scheme, self.host) - - @cached_property - def host(self) -> str: - """The host name the request was made to, including the port if - it's non-standard. Validated with :attr:`trusted_hosts`. - """ - return get_host( - self.scheme, self.headers.get("host"), self.server, self.trusted_hosts - ) - - @cached_property - def cookies(self) -> "ImmutableMultiDict[str, str]": - """A :class:`dict` with the contents of all cookies transmitted with - the request.""" - wsgi_combined_cookie = ";".join(self.headers.getlist("Cookie")) - return parse_cookie( # type: ignore - wsgi_combined_cookie, - self.charset, - self.encoding_errors, - cls=self.dict_storage_class, - ) - - # Common Descriptors - - content_type = header_property[str]( - "Content-Type", - doc="""The Content-Type entity-header field indicates the media - type of the entity-body sent to the recipient or, in the case of - the HEAD method, the media type that would have been sent had - the request been a GET.""", - read_only=True, - ) - - @cached_property - def content_length(self) -> t.Optional[int]: - """The Content-Length entity-header field indicates the size of the - entity-body in bytes or, in the case of the HEAD method, the size of - the entity-body that would have been sent had the request been a - GET. - """ - if self.headers.get("Transfer-Encoding", "") == "chunked": - return None - - content_length = self.headers.get("Content-Length") - if content_length is not None: - try: - return max(0, int(content_length)) - except (ValueError, TypeError): - pass - - return None - - content_encoding = header_property[str]( - "Content-Encoding", - doc="""The Content-Encoding entity-header field is used as a - modifier to the media-type. When present, its value indicates - what additional content codings have been applied to the - entity-body, and thus what decoding mechanisms must be applied - in order to obtain the media-type referenced by the Content-Type - header field. - - .. versionadded:: 0.9""", - read_only=True, - ) - content_md5 = header_property[str]( - "Content-MD5", - doc="""The Content-MD5 entity-header field, as defined in - RFC 1864, is an MD5 digest of the entity-body for the purpose of - providing an end-to-end message integrity check (MIC) of the - entity-body. (Note: a MIC is good for detecting accidental - modification of the entity-body in transit, but is not proof - against malicious attacks.) - - .. 
versionadded:: 0.9""", - read_only=True, - ) - referrer = header_property[str]( - "Referer", - doc="""The Referer[sic] request-header field allows the client - to specify, for the server's benefit, the address (URI) of the - resource from which the Request-URI was obtained (the - "referrer", although the header field is misspelled).""", - read_only=True, - ) - date = header_property( - "Date", - None, - parse_date, - doc="""The Date general-header field represents the date and - time at which the message was originated, having the same - semantics as orig-date in RFC 822. - - .. versionchanged:: 2.0 - The datetime object is timezone-aware. - """, - read_only=True, - ) - max_forwards = header_property( - "Max-Forwards", - None, - int, - doc="""The Max-Forwards request-header field provides a - mechanism with the TRACE and OPTIONS methods to limit the number - of proxies or gateways that can forward the request to the next - inbound server.""", - read_only=True, - ) - - def _parse_content_type(self) -> None: - if not hasattr(self, "_parsed_content_type"): - self._parsed_content_type = parse_options_header( - self.headers.get("Content-Type", "") - ) - - @property - def mimetype(self) -> str: - """Like :attr:`content_type`, but without parameters (eg, without - charset, type etc.) and always lowercase. For example if the content - type is ``text/HTML; charset=utf-8`` the mimetype would be - ``'text/html'``. - """ - self._parse_content_type() - return self._parsed_content_type[0].lower() - - @property - def mimetype_params(self) -> t.Dict[str, str]: - """The mimetype parameters as dict. For example if the content - type is ``text/html; charset=utf-8`` the params would be - ``{'charset': 'utf-8'}``. - """ - self._parse_content_type() - return self._parsed_content_type[1] - - @cached_property - def pragma(self) -> HeaderSet: - """The Pragma general-header field is used to include - implementation-specific directives that might apply to any recipient - along the request/response chain. All pragma directives specify - optional behavior from the viewpoint of the protocol; however, some - systems MAY require that behavior be consistent with the directives. - """ - return parse_set_header(self.headers.get("Pragma", "")) - - # Accept - - @cached_property - def accept_mimetypes(self) -> MIMEAccept: - """List of mimetypes this client supports as - :class:`~werkzeug.datastructures.MIMEAccept` object. - """ - return parse_accept_header(self.headers.get("Accept"), MIMEAccept) - - @cached_property - def accept_charsets(self) -> CharsetAccept: - """List of charsets this client supports as - :class:`~werkzeug.datastructures.CharsetAccept` object. - """ - return parse_accept_header(self.headers.get("Accept-Charset"), CharsetAccept) - - @cached_property - def accept_encodings(self) -> Accept: - """List of encodings this client accepts. Encodings in a HTTP term - are compression encodings such as gzip. For charsets have a look at - :attr:`accept_charset`. - """ - return parse_accept_header(self.headers.get("Accept-Encoding")) - - @cached_property - def accept_languages(self) -> LanguageAccept: - """List of languages this client accepts as - :class:`~werkzeug.datastructures.LanguageAccept` object. - - .. versionchanged 0.5 - In previous versions this was a regular - :class:`~werkzeug.datastructures.Accept` object. 
- """ - return parse_accept_header(self.headers.get("Accept-Language"), LanguageAccept) - - # ETag - - @cached_property - def cache_control(self) -> RequestCacheControl: - """A :class:`~werkzeug.datastructures.RequestCacheControl` object - for the incoming cache control headers. - """ - cache_control = self.headers.get("Cache-Control") - return parse_cache_control_header(cache_control, None, RequestCacheControl) - - @cached_property - def if_match(self) -> ETags: - """An object containing all the etags in the `If-Match` header. - - :rtype: :class:`~werkzeug.datastructures.ETags` - """ - return parse_etags(self.headers.get("If-Match")) - - @cached_property - def if_none_match(self) -> ETags: - """An object containing all the etags in the `If-None-Match` header. - - :rtype: :class:`~werkzeug.datastructures.ETags` - """ - return parse_etags(self.headers.get("If-None-Match")) - - @cached_property - def if_modified_since(self) -> t.Optional[datetime]: - """The parsed `If-Modified-Since` header as a datetime object. - - .. versionchanged:: 2.0 - The datetime object is timezone-aware. - """ - return parse_date(self.headers.get("If-Modified-Since")) - - @cached_property - def if_unmodified_since(self) -> t.Optional[datetime]: - """The parsed `If-Unmodified-Since` header as a datetime object. - - .. versionchanged:: 2.0 - The datetime object is timezone-aware. - """ - return parse_date(self.headers.get("If-Unmodified-Since")) - - @cached_property - def if_range(self) -> IfRange: - """The parsed ``If-Range`` header. - - .. versionchanged:: 2.0 - ``IfRange.date`` is timezone-aware. - - .. versionadded:: 0.7 - """ - return parse_if_range_header(self.headers.get("If-Range")) - - @cached_property - def range(self) -> t.Optional[Range]: - """The parsed `Range` header. - - .. versionadded:: 0.7 - - :rtype: :class:`~werkzeug.datastructures.Range` - """ - return parse_range_header(self.headers.get("Range")) - - # User Agent - - @cached_property - def user_agent(self) -> UserAgent: - """The user agent. Use ``user_agent.string`` to get the header - value. Set :attr:`user_agent_class` to a subclass of - :class:`~werkzeug.user_agent.UserAgent` to provide parsing for - the other properties or other extended data. - - .. versionchanged:: 2.0 - The built in parser is deprecated and will be removed in - Werkzeug 2.1. A ``UserAgent`` subclass must be set to parse - data from the string. - """ - return self.user_agent_class(self.headers.get("User-Agent", "")) - - # Authorization - - @cached_property - def authorization(self) -> t.Optional[Authorization]: - """The `Authorization` object in parsed form.""" - return parse_authorization_header(self.headers.get("Authorization")) - - # CORS - - origin = header_property[str]( - "Origin", - doc=( - "The host that the request originated from. Set" - " :attr:`~CORSResponseMixin.access_control_allow_origin` on" - " the response to indicate which origins are allowed." - ), - read_only=True, - ) - - access_control_request_headers = header_property( - "Access-Control-Request-Headers", - load_func=parse_set_header, - doc=( - "Sent with a preflight request to indicate which headers" - " will be sent with the cross origin request. Set" - " :attr:`~CORSResponseMixin.access_control_allow_headers`" - " on the response to indicate which headers are allowed." - ), - read_only=True, - ) - - access_control_request_method = header_property[str]( - "Access-Control-Request-Method", - doc=( - "Sent with a preflight request to indicate which method" - " will be used for the cross origin request. 
Set" - " :attr:`~CORSResponseMixin.access_control_allow_methods`" - " on the response to indicate which methods are allowed." - ), - read_only=True, - ) - - @property - def is_json(self) -> bool: - """Check if the mimetype indicates JSON data, either - :mimetype:`application/json` or :mimetype:`application/*+json`. - """ - mt = self.mimetype - return ( - mt == "application/json" - or mt.startswith("application/") - and mt.endswith("+json") - ) diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/response.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/response.py deleted file mode 100644 index 82817e8..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/sansio/response.py +++ /dev/null @@ -1,704 +0,0 @@ -import typing as t -from datetime import datetime -from datetime import timedelta -from datetime import timezone -from http import HTTPStatus - -from .._internal import _to_str -from ..datastructures import Headers -from ..datastructures import HeaderSet -from ..http import dump_cookie -from ..http import HTTP_STATUS_CODES -from ..utils import get_content_type -from werkzeug.datastructures import CallbackDict -from werkzeug.datastructures import ContentRange -from werkzeug.datastructures import ContentSecurityPolicy -from werkzeug.datastructures import ResponseCacheControl -from werkzeug.datastructures import WWWAuthenticate -from werkzeug.http import COEP -from werkzeug.http import COOP -from werkzeug.http import dump_age -from werkzeug.http import dump_header -from werkzeug.http import dump_options_header -from werkzeug.http import http_date -from werkzeug.http import parse_age -from werkzeug.http import parse_cache_control_header -from werkzeug.http import parse_content_range_header -from werkzeug.http import parse_csp_header -from werkzeug.http import parse_date -from werkzeug.http import parse_options_header -from werkzeug.http import parse_set_header -from werkzeug.http import parse_www_authenticate_header -from werkzeug.http import quote_etag -from werkzeug.http import unquote_etag -from werkzeug.utils import header_property - - -def _set_property(name: str, doc: t.Optional[str] = None) -> property: - def fget(self: "Response") -> HeaderSet: - def on_update(header_set: HeaderSet) -> None: - if not header_set and name in self.headers: - del self.headers[name] - elif header_set: - self.headers[name] = header_set.to_header() - - return parse_set_header(self.headers.get(name), on_update) - - def fset( - self: "Response", - value: t.Optional[ - t.Union[str, t.Dict[str, t.Union[str, int]], t.Iterable[str]] - ], - ) -> None: - if not value: - del self.headers[name] - elif isinstance(value, str): - self.headers[name] = value - else: - self.headers[name] = dump_header(value) - - return property(fget, fset, doc=doc) - - -class Response: - """Represents the non-IO parts of an HTTP response, specifically the - status and headers but not the body. - - This class is not meant for general use. It should only be used when - implementing WSGI, ASGI, or another HTTP application spec. Werkzeug - provides a WSGI implementation at :cls:`werkzeug.wrappers.Response`. - - :param status: The status code for the response. Either an int, in - which case the default status message is added, or a string in - the form ``{code} {message}``, like ``404 Not Found``. Defaults - to 200. - :param headers: A :class:`~werkzeug.datastructures.Headers` object, - or a list of ``(key, value)`` tuples that will be converted to a - ``Headers`` object. 
- :param mimetype: The mime type (content type without charset or - other parameters) of the response. If the value starts with - ``text/`` (or matches some other special cases), the charset - will be added to create the ``content_type``. - :param content_type: The full content type of the response. - Overrides building the value from ``mimetype``. - - .. versionadded:: 2.0 - """ - - #: the charset of the response. - charset = "utf-8" - - #: the default status if none is provided. - default_status = 200 - - #: the default mimetype if none is provided. - default_mimetype = "text/plain" - - #: Warn if a cookie header exceeds this size. The default, 4093, should be - #: safely `supported by most browsers `_. A cookie larger than - #: this size will still be sent, but it may be ignored or handled - #: incorrectly by some browsers. Set to 0 to disable this check. - #: - #: .. versionadded:: 0.13 - #: - #: .. _`cookie`: http://browsercookielimits.squawky.net/ - max_cookie_size = 4093 - - # A :class:`Headers` object representing the response headers. - headers: Headers - - def __init__( - self, - status: t.Optional[t.Union[int, str, HTTPStatus]] = None, - headers: t.Optional[ - t.Union[ - t.Mapping[str, t.Union[str, int, t.Iterable[t.Union[str, int]]]], - t.Iterable[t.Tuple[str, t.Union[str, int]]], - ] - ] = None, - mimetype: t.Optional[str] = None, - content_type: t.Optional[str] = None, - ) -> None: - if isinstance(headers, Headers): - self.headers = headers - elif not headers: - self.headers = Headers() - else: - self.headers = Headers(headers) - - if content_type is None: - if mimetype is None and "content-type" not in self.headers: - mimetype = self.default_mimetype - if mimetype is not None: - mimetype = get_content_type(mimetype, self.charset) - content_type = mimetype - if content_type is not None: - self.headers["Content-Type"] = content_type - if status is None: - status = self.default_status - self.status = status # type: ignore - - def __repr__(self) -> str: - return f"<{type(self).__name__} [{self.status}]>" - - @property - def status_code(self) -> int: - """The HTTP status code as a number.""" - return self._status_code - - @status_code.setter - def status_code(self, code: int) -> None: - self.status = code # type: ignore - - @property - def status(self) -> str: - """The HTTP status code as a string.""" - return self._status - - @status.setter - def status(self, value: t.Union[str, int, HTTPStatus]) -> None: - if not isinstance(value, (str, bytes, int, HTTPStatus)): - raise TypeError("Invalid status argument") - - self._status, self._status_code = self._clean_status(value) - - def _clean_status(self, value: t.Union[str, int, HTTPStatus]) -> t.Tuple[str, int]: - if isinstance(value, HTTPStatus): - value = int(value) - status = _to_str(value, self.charset) - split_status = status.split(None, 1) - - if len(split_status) == 0: - raise ValueError("Empty status argument") - - if len(split_status) > 1: - if split_status[0].isdigit(): - # code and message - return status, int(split_status[0]) - - # multi-word message - return f"0 {status}", 0 - - if split_status[0].isdigit(): - # code only - status_code = int(split_status[0]) - - try: - status = f"{status_code} {HTTP_STATUS_CODES[status_code].upper()}" - except KeyError: - status = f"{status_code} UNKNOWN" - - return status, status_code - - # one-word message - return f"0 {status}", 0 - - def set_cookie( - self, - key: str, - value: str = "", - max_age: t.Optional[t.Union[timedelta, int]] = None, - expires: t.Optional[t.Union[str, 
datetime, int, float]] = None, - path: t.Optional[str] = "/", - domain: t.Optional[str] = None, - secure: bool = False, - httponly: bool = False, - samesite: t.Optional[str] = None, - ) -> None: - """Sets a cookie. - - A warning is raised if the size of the cookie header exceeds - :attr:`max_cookie_size`, but the header will still be set. - - :param key: the key (name) of the cookie to be set. - :param value: the value of the cookie. - :param max_age: should be a number of seconds, or `None` (default) if - the cookie should last only as long as the client's - browser session. - :param expires: should be a `datetime` object or UNIX timestamp. - :param path: limits the cookie to a given path, per default it will - span the whole domain. - :param domain: if you want to set a cross-domain cookie. For example, - ``domain=".example.com"`` will set a cookie that is - readable by the domain ``www.example.com``, - ``foo.example.com`` etc. Otherwise, a cookie will only - be readable by the domain that set it. - :param secure: If ``True``, the cookie will only be available - via HTTPS. - :param httponly: Disallow JavaScript access to the cookie. - :param samesite: Limit the scope of the cookie to only be - attached to requests that are "same-site". - """ - self.headers.add( - "Set-Cookie", - dump_cookie( - key, - value=value, - max_age=max_age, - expires=expires, - path=path, - domain=domain, - secure=secure, - httponly=httponly, - charset=self.charset, - max_size=self.max_cookie_size, - samesite=samesite, - ), - ) - - def delete_cookie( - self, - key: str, - path: str = "/", - domain: t.Optional[str] = None, - secure: bool = False, - httponly: bool = False, - samesite: t.Optional[str] = None, - ) -> None: - """Delete a cookie. Fails silently if key doesn't exist. - - :param key: the key (name) of the cookie to be deleted. - :param path: if the cookie that should be deleted was limited to a - path, the path has to be defined here. - :param domain: if the cookie that should be deleted was limited to a - domain, that domain has to be defined here. - :param secure: If ``True``, the cookie will only be available - via HTTPS. - :param httponly: Disallow JavaScript access to the cookie. - :param samesite: Limit the scope of the cookie to only be - attached to requests that are "same-site". - """ - self.set_cookie( - key, - expires=0, - max_age=0, - path=path, - domain=domain, - secure=secure, - httponly=httponly, - samesite=samesite, - ) - - @property - def is_json(self) -> bool: - """Check if the mimetype indicates JSON data, either - :mimetype:`application/json` or :mimetype:`application/*+json`. - """ - mt = self.mimetype - return mt is not None and ( - mt == "application/json" - or mt.startswith("application/") - and mt.endswith("+json") - ) - - # Common Descriptors - - @property - def mimetype(self) -> t.Optional[str]: - """The mimetype (content type without charset etc.)""" - ct = self.headers.get("content-type") - - if ct: - return ct.split(";")[0].strip() - else: - return None - - @mimetype.setter - def mimetype(self, value: str) -> None: - self.headers["Content-Type"] = get_content_type(value, self.charset) - - @property - def mimetype_params(self) -> t.Dict[str, str]: - """The mimetype parameters as dict. For example if the - content type is ``text/html; charset=utf-8`` the params would be - ``{'charset': 'utf-8'}``. - - .. 
versionadded:: 0.5 - """ - - def on_update(d: CallbackDict) -> None: - self.headers["Content-Type"] = dump_options_header(self.mimetype, d) - - d = parse_options_header(self.headers.get("content-type", ""))[1] - return CallbackDict(d, on_update) - - location = header_property[str]( - "Location", - doc="""The Location response-header field is used to redirect - the recipient to a location other than the Request-URI for - completion of the request or identification of a new - resource.""", - ) - age = header_property( - "Age", - None, - parse_age, - dump_age, # type: ignore - doc="""The Age response-header field conveys the sender's - estimate of the amount of time since the response (or its - revalidation) was generated at the origin server. - - Age values are non-negative decimal integers, representing time - in seconds.""", - ) - content_type = header_property[str]( - "Content-Type", - doc="""The Content-Type entity-header field indicates the media - type of the entity-body sent to the recipient or, in the case of - the HEAD method, the media type that would have been sent had - the request been a GET.""", - ) - content_length = header_property( - "Content-Length", - None, - int, - str, - doc="""The Content-Length entity-header field indicates the size - of the entity-body, in decimal number of OCTETs, sent to the - recipient or, in the case of the HEAD method, the size of the - entity-body that would have been sent had the request been a - GET.""", - ) - content_location = header_property[str]( - "Content-Location", - doc="""The Content-Location entity-header field MAY be used to - supply the resource location for the entity enclosed in the - message when that entity is accessible from a location separate - from the requested resource's URI.""", - ) - content_encoding = header_property[str]( - "Content-Encoding", - doc="""The Content-Encoding entity-header field is used as a - modifier to the media-type. When present, its value indicates - what additional content codings have been applied to the - entity-body, and thus what decoding mechanisms must be applied - in order to obtain the media-type referenced by the Content-Type - header field.""", - ) - content_md5 = header_property[str]( - "Content-MD5", - doc="""The Content-MD5 entity-header field, as defined in - RFC 1864, is an MD5 digest of the entity-body for the purpose of - providing an end-to-end message integrity check (MIC) of the - entity-body. (Note: a MIC is good for detecting accidental - modification of the entity-body in transit, but is not proof - against malicious attacks.)""", - ) - date = header_property( - "Date", - None, - parse_date, - http_date, - doc="""The Date general-header field represents the date and - time at which the message was originated, having the same - semantics as orig-date in RFC 822. - - .. versionchanged:: 2.0 - The datetime object is timezone-aware. - """, - ) - expires = header_property( - "Expires", - None, - parse_date, - http_date, - doc="""The Expires entity-header field gives the date/time after - which the response is considered stale. A stale cache entry may - not normally be returned by a cache. - - .. versionchanged:: 2.0 - The datetime object is timezone-aware. - """, - ) - last_modified = header_property( - "Last-Modified", - None, - parse_date, - http_date, - doc="""The Last-Modified entity-header field indicates the date - and time at which the origin server believes the variant was - last modified. - - .. versionchanged:: 2.0 - The datetime object is timezone-aware. 
- """, - ) - - @property - def retry_after(self) -> t.Optional[datetime]: - """The Retry-After response-header field can be used with a - 503 (Service Unavailable) response to indicate how long the - service is expected to be unavailable to the requesting client. - - Time in seconds until expiration or date. - - .. versionchanged:: 2.0 - The datetime object is timezone-aware. - """ - value = self.headers.get("retry-after") - if value is None: - return None - elif value.isdigit(): - return datetime.now(timezone.utc) + timedelta(seconds=int(value)) - return parse_date(value) - - @retry_after.setter - def retry_after(self, value: t.Optional[t.Union[datetime, int, str]]) -> None: - if value is None: - if "retry-after" in self.headers: - del self.headers["retry-after"] - return - elif isinstance(value, datetime): - value = http_date(value) - else: - value = str(value) - self.headers["Retry-After"] = value - - vary = _set_property( - "Vary", - doc="""The Vary field value indicates the set of request-header - fields that fully determines, while the response is fresh, - whether a cache is permitted to use the response to reply to a - subsequent request without revalidation.""", - ) - content_language = _set_property( - "Content-Language", - doc="""The Content-Language entity-header field describes the - natural language(s) of the intended audience for the enclosed - entity. Note that this might not be equivalent to all the - languages used within the entity-body.""", - ) - allow = _set_property( - "Allow", - doc="""The Allow entity-header field lists the set of methods - supported by the resource identified by the Request-URI. The - purpose of this field is strictly to inform the recipient of - valid methods associated with the resource. An Allow header - field MUST be present in a 405 (Method Not Allowed) - response.""", - ) - - # ETag - - @property - def cache_control(self) -> ResponseCacheControl: - """The Cache-Control general-header field is used to specify - directives that MUST be obeyed by all caching mechanisms along the - request/response chain. - """ - - def on_update(cache_control: ResponseCacheControl) -> None: - if not cache_control and "cache-control" in self.headers: - del self.headers["cache-control"] - elif cache_control: - self.headers["Cache-Control"] = cache_control.to_header() - - return parse_cache_control_header( - self.headers.get("cache-control"), on_update, ResponseCacheControl - ) - - def set_etag(self, etag: str, weak: bool = False) -> None: - """Set the etag, and override the old one if there was one.""" - self.headers["ETag"] = quote_etag(etag, weak) - - def get_etag(self) -> t.Union[t.Tuple[str, bool], t.Tuple[None, None]]: - """Return a tuple in the form ``(etag, is_weak)``. If there is no - ETag the return value is ``(None, None)``. - """ - return unquote_etag(self.headers.get("ETag")) - - accept_ranges = header_property[str]( - "Accept-Ranges", - doc="""The `Accept-Ranges` header. Even though the name would - indicate that multiple values are supported, it must be one - string token only. - - The values ``'bytes'`` and ``'none'`` are common. - - .. versionadded:: 0.7""", - ) - - @property - def content_range(self) -> ContentRange: - """The ``Content-Range`` header as a - :class:`~werkzeug.datastructures.ContentRange` object. Available - even if the header is not set. - - .. 
versionadded:: 0.7 - """ - - def on_update(rng: ContentRange) -> None: - if not rng: - del self.headers["content-range"] - else: - self.headers["Content-Range"] = rng.to_header() - - rv = parse_content_range_header(self.headers.get("content-range"), on_update) - # always provide a content range object to make the descriptor - # more user friendly. It provides an unset() method that can be - # used to remove the header quickly. - if rv is None: - rv = ContentRange(None, None, None, on_update=on_update) - return rv - - @content_range.setter - def content_range(self, value: t.Optional[t.Union[ContentRange, str]]) -> None: - if not value: - del self.headers["content-range"] - elif isinstance(value, str): - self.headers["Content-Range"] = value - else: - self.headers["Content-Range"] = value.to_header() - - # Authorization - - @property - def www_authenticate(self) -> WWWAuthenticate: - """The ``WWW-Authenticate`` header in a parsed form.""" - - def on_update(www_auth: WWWAuthenticate) -> None: - if not www_auth and "www-authenticate" in self.headers: - del self.headers["www-authenticate"] - elif www_auth: - self.headers["WWW-Authenticate"] = www_auth.to_header() - - header = self.headers.get("www-authenticate") - return parse_www_authenticate_header(header, on_update) - - # CSP - - @property - def content_security_policy(self) -> ContentSecurityPolicy: - """The ``Content-Security-Policy`` header as a - :class:`~werkzeug.datastructures.ContentSecurityPolicy` object. Available - even if the header is not set. - - The Content-Security-Policy header adds an additional layer of - security to help detect and mitigate certain types of attacks. - """ - - def on_update(csp: ContentSecurityPolicy) -> None: - if not csp: - del self.headers["content-security-policy"] - else: - self.headers["Content-Security-Policy"] = csp.to_header() - - rv = parse_csp_header(self.headers.get("content-security-policy"), on_update) - if rv is None: - rv = ContentSecurityPolicy(None, on_update=on_update) - return rv - - @content_security_policy.setter - def content_security_policy( - self, value: t.Optional[t.Union[ContentSecurityPolicy, str]] - ) -> None: - if not value: - del self.headers["content-security-policy"] - elif isinstance(value, str): - self.headers["Content-Security-Policy"] = value - else: - self.headers["Content-Security-Policy"] = value.to_header() - - @property - def content_security_policy_report_only(self) -> ContentSecurityPolicy: - """The ``Content-Security-policy-report-only`` header as a - :class:`~werkzeug.datastructures.ContentSecurityPolicy` object. Available - even if the header is not set. - - The Content-Security-Policy-Report-Only header adds a csp policy - that is not enforced but is reported thereby helping detect - certain types of attacks. 
- """ - - def on_update(csp: ContentSecurityPolicy) -> None: - if not csp: - del self.headers["content-security-policy-report-only"] - else: - self.headers["Content-Security-policy-report-only"] = csp.to_header() - - rv = parse_csp_header( - self.headers.get("content-security-policy-report-only"), on_update - ) - if rv is None: - rv = ContentSecurityPolicy(None, on_update=on_update) - return rv - - @content_security_policy_report_only.setter - def content_security_policy_report_only( - self, value: t.Optional[t.Union[ContentSecurityPolicy, str]] - ) -> None: - if not value: - del self.headers["content-security-policy-report-only"] - elif isinstance(value, str): - self.headers["Content-Security-policy-report-only"] = value - else: - self.headers["Content-Security-policy-report-only"] = value.to_header() - - # CORS - - @property - def access_control_allow_credentials(self) -> bool: - """Whether credentials can be shared by the browser to - JavaScript code. As part of the preflight request it indicates - whether credentials can be used on the cross origin request. - """ - return "Access-Control-Allow-Credentials" in self.headers - - @access_control_allow_credentials.setter - def access_control_allow_credentials(self, value: t.Optional[bool]) -> None: - if value is True: - self.headers["Access-Control-Allow-Credentials"] = "true" - else: - self.headers.pop("Access-Control-Allow-Credentials", None) - - access_control_allow_headers = header_property( - "Access-Control-Allow-Headers", - load_func=parse_set_header, - dump_func=dump_header, - doc="Which headers can be sent with the cross origin request.", - ) - - access_control_allow_methods = header_property( - "Access-Control-Allow-Methods", - load_func=parse_set_header, - dump_func=dump_header, - doc="Which methods can be used for the cross origin request.", - ) - - access_control_allow_origin = header_property[str]( - "Access-Control-Allow-Origin", - doc="The origin or '*' for any origin that may make cross origin requests.", - ) - - access_control_expose_headers = header_property( - "Access-Control-Expose-Headers", - load_func=parse_set_header, - dump_func=dump_header, - doc="Which headers can be shared by the browser to JavaScript code.", - ) - - access_control_max_age = header_property( - "Access-Control-Max-Age", - load_func=int, - dump_func=str, - doc="The maximum age in seconds the access control settings can be cached for.", - ) - - cross_origin_opener_policy = header_property[COOP]( - "Cross-Origin-Opener-Policy", - load_func=lambda value: COOP(value), - dump_func=lambda value: value.value, - default=COOP.UNSAFE_NONE, - doc="""Allows control over sharing of browsing context group with cross-origin - documents. Values must be a member of the :class:`werkzeug.http.COOP` enum.""", - ) - - cross_origin_embedder_policy = header_property[COEP]( - "Cross-Origin-Embedder-Policy", - load_func=lambda value: COEP(value), - dump_func=lambda value: value.value, - default=COEP.UNSAFE_NONE, - doc="""Prevents a document from loading any cross-origin resources that do not - explicitly grant the document permission. 
Values must be a member of the - :class:`werkzeug.http.COEP` enum.""", - ) diff --git a/venv/lib/python3.8/site-packages/werkzeug/sansio/utils.py b/venv/lib/python3.8/site-packages/werkzeug/sansio/utils.py deleted file mode 100644 index 1b4d892..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/sansio/utils.py +++ /dev/null @@ -1,142 +0,0 @@ -import typing as t - -from .._internal import _encode_idna -from ..exceptions import SecurityError -from ..urls import uri_to_iri -from ..urls import url_quote - - -def host_is_trusted(hostname: str, trusted_list: t.Iterable[str]) -> bool: - """Check if a host matches a list of trusted names. - - :param hostname: The name to check. - :param trusted_list: A list of valid names to match. If a name - starts with a dot it will match all subdomains. - - .. versionadded:: 0.9 - """ - if not hostname: - return False - - if isinstance(trusted_list, str): - trusted_list = [trusted_list] - - def _normalize(hostname: str) -> bytes: - if ":" in hostname: - hostname = hostname.rsplit(":", 1)[0] - - return _encode_idna(hostname) - - try: - hostname_bytes = _normalize(hostname) - except UnicodeError: - return False - - for ref in trusted_list: - if ref.startswith("."): - ref = ref[1:] - suffix_match = True - else: - suffix_match = False - - try: - ref_bytes = _normalize(ref) - except UnicodeError: - return False - - if ref_bytes == hostname_bytes: - return True - - if suffix_match and hostname_bytes.endswith(b"." + ref_bytes): - return True - - return False - - -def get_host( - scheme: str, - host_header: t.Optional[str], - server: t.Optional[t.Tuple[str, t.Optional[int]]] = None, - trusted_hosts: t.Optional[t.Iterable[str]] = None, -) -> str: - """Return the host for the given parameters. - - This first checks the ``host_header``. If it's not present, then - ``server`` is used. The host will only contain the port if it is - different than the standard port for the protocol. - - Optionally, verify that the host is trusted using - :func:`host_is_trusted` and raise a - :exc:`~werkzeug.exceptions.SecurityError` if it is not. - - :param scheme: The protocol the request used, like ``"https"``. - :param host_header: The ``Host`` header value. - :param server: Address of the server. ``(host, port)``, or - ``(path, None)`` for unix sockets. - :param trusted_hosts: A list of trusted host names. - - :return: Host, with port if necessary. - :raise ~werkzeug.exceptions.SecurityError: If the host is not - trusted. - """ - host = "" - - if host_header is not None: - host = host_header - elif server is not None: - host = server[0] - - if server[1] is not None: - host = f"{host}:{server[1]}" - - if scheme in {"http", "ws"} and host.endswith(":80"): - host = host[:-3] - elif scheme in {"https", "wss"} and host.endswith(":443"): - host = host[:-4] - - if trusted_hosts is not None: - if not host_is_trusted(host, trusted_hosts): - raise SecurityError(f"Host {host!r} is not trusted.") - - return host - - -def get_current_url( - scheme: str, - host: str, - root_path: t.Optional[str] = None, - path: t.Optional[str] = None, - query_string: t.Optional[bytes] = None, -) -> str: - """Recreate the URL for a request. If an optional part isn't - provided, it and subsequent parts are not included in the URL. - - The URL is an IRI, not a URI, so it may contain Unicode characters. - Use :func:`~werkzeug.urls.iri_to_uri` to convert it to ASCII. - - :param scheme: The protocol the request used, like ``"https"``. - :param host: The host the request was made to. See :func:`get_host`. 
- :param root_path: Prefix that the application is mounted under. This - is prepended to ``path``. - :param path: The path part of the URL after ``root_path``. - :param query_string: The portion of the URL after the "?". - """ - url = [scheme, "://", host] - - if root_path is None: - url.append("/") - return uri_to_iri("".join(url)) - - url.append(url_quote(root_path.rstrip("/"))) - url.append("/") - - if path is None: - return uri_to_iri("".join(url)) - - url.append(url_quote(path.lstrip("/"))) - - if query_string: - url.append("?") - url.append(url_quote(query_string, safe=":&%=+$!*'(),")) - - return uri_to_iri("".join(url)) diff --git a/venv/lib/python3.8/site-packages/werkzeug/security.py b/venv/lib/python3.8/site-packages/werkzeug/security.py deleted file mode 100644 index e23040a..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/security.py +++ /dev/null @@ -1,247 +0,0 @@ -import hashlib -import hmac -import os -import posixpath -import secrets -import typing as t -import warnings - -if t.TYPE_CHECKING: - pass - -SALT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" -DEFAULT_PBKDF2_ITERATIONS = 260000 - -_os_alt_seps: t.List[str] = list( - sep for sep in [os.path.sep, os.path.altsep] if sep is not None and sep != "/" -) - - -def pbkdf2_hex( - data: t.Union[str, bytes], - salt: t.Union[str, bytes], - iterations: int = DEFAULT_PBKDF2_ITERATIONS, - keylen: t.Optional[int] = None, - hashfunc: t.Optional[t.Union[str, t.Callable]] = None, -) -> str: - """Like :func:`pbkdf2_bin`, but returns a hex-encoded string. - - :param data: the data to derive. - :param salt: the salt for the derivation. - :param iterations: the number of iterations. - :param keylen: the length of the resulting key. If not provided, - the digest size will be used. - :param hashfunc: the hash function to use. This can either be the - string name of a known hash function, or a function - from the hashlib module. Defaults to sha256. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Use :func:`hashlib.pbkdf2_hmac` - instead. - - .. versionadded:: 0.9 - """ - warnings.warn( - "'pbkdf2_hex' is deprecated and will be removed in Werkzeug" - " 2.1. Use 'hashlib.pbkdf2_hmac().hex()' instead.", - DeprecationWarning, - stacklevel=2, - ) - return pbkdf2_bin(data, salt, iterations, keylen, hashfunc).hex() - - -def pbkdf2_bin( - data: t.Union[str, bytes], - salt: t.Union[str, bytes], - iterations: int = DEFAULT_PBKDF2_ITERATIONS, - keylen: t.Optional[int] = None, - hashfunc: t.Optional[t.Union[str, t.Callable]] = None, -) -> bytes: - """Returns a binary digest for the PBKDF2 hash algorithm of `data` - with the given `salt`. It iterates `iterations` times and produces a - key of `keylen` bytes. By default, SHA-256 is used as hash function; - a different hashlib `hashfunc` can be provided. - - :param data: the data to derive. - :param salt: the salt for the derivation. - :param iterations: the number of iterations. - :param keylen: the length of the resulting key. If not provided - the digest size will be used. - :param hashfunc: the hash function to use. This can either be the - string name of a known hash function or a function - from the hashlib module. Defaults to sha256. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Use :func:`hashlib.pbkdf2_hmac` - instead. - - .. versionadded:: 0.9 - """ - warnings.warn( - "'pbkdf2_bin' is deprecated and will be removed in Werkzeug" - " 2.1. 
Use 'hashlib.pbkdf2_hmac()' instead.", - DeprecationWarning, - stacklevel=2, - ) - - if isinstance(data, str): - data = data.encode("utf8") - - if isinstance(salt, str): - salt = salt.encode("utf8") - - if not hashfunc: - hash_name = "sha256" - elif callable(hashfunc): - hash_name = hashfunc().name - else: - hash_name = hashfunc - - return hashlib.pbkdf2_hmac(hash_name, data, salt, iterations, keylen) - - -def safe_str_cmp(a: str, b: str) -> bool: - """This function compares strings in somewhat constant time. This - requires that the length of at least one string is known in advance. - - Returns `True` if the two strings are equal, or `False` if they are not. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Use - :func:`hmac.compare_digest` instead. - - .. versionadded:: 0.7 - """ - warnings.warn( - "'safe_str_cmp' is deprecated and will be removed in Werkzeug" - " 2.1. Use 'hmac.compare_digest' instead.", - DeprecationWarning, - stacklevel=2, - ) - - if isinstance(a, str): - a = a.encode("utf-8") # type: ignore - - if isinstance(b, str): - b = b.encode("utf-8") # type: ignore - - return hmac.compare_digest(a, b) - - -def gen_salt(length: int) -> str: - """Generate a random string of SALT_CHARS with specified ``length``.""" - if length <= 0: - raise ValueError("Salt length must be positive") - - return "".join(secrets.choice(SALT_CHARS) for _ in range(length)) - - -def _hash_internal(method: str, salt: str, password: str) -> t.Tuple[str, str]: - """Internal password hash helper. Supports plaintext without salt, - unsalted and salted passwords. In case salted passwords are used - hmac is used. - """ - if method == "plain": - return password, method - - salt = salt.encode("utf-8") - password = password.encode("utf-8") - - if method.startswith("pbkdf2:"): - if not salt: - raise ValueError("Salt is required for PBKDF2") - - args = method[7:].split(":") - - if len(args) not in (1, 2): - raise ValueError("Invalid number of arguments for PBKDF2") - - method = args.pop(0) - iterations = int(args[0] or 0) if args else DEFAULT_PBKDF2_ITERATIONS - return ( - hashlib.pbkdf2_hmac(method, password, salt, iterations).hex(), - f"pbkdf2:{method}:{iterations}", - ) - - if salt: - return hmac.new(salt, password, method).hexdigest(), method - - return hashlib.new(method, password).hexdigest(), method - - -def generate_password_hash( - password: str, method: str = "pbkdf2:sha256", salt_length: int = 16 -) -> str: - """Hash a password with the given method and salt with a string of - the given length. The format of the string returned includes the method - that was used so that :func:`check_password_hash` can check the hash. - - The format for the hashed string looks like this:: - - method$salt$hash - - This method can **not** generate unsalted passwords but it is possible - to set param method='plain' in order to enforce plaintext passwords. - If a salt is used, hmac is used internally to salt the password. - - If PBKDF2 is wanted it can be enabled by setting the method to - ``pbkdf2:method:iterations`` where iterations is optional:: - - pbkdf2:sha256:80000$salt$hash - pbkdf2:sha256$salt$hash - - :param password: the password to hash. - :param method: the hash method to use (one that hashlib supports). Can - optionally be in the format ``pbkdf2:method:iterations`` - to enable PBKDF2. - :param salt_length: the length of the salt in letters. 
- """ - salt = gen_salt(salt_length) if method != "plain" else "" - h, actual_method = _hash_internal(method, salt, password) - return f"{actual_method}${salt}${h}" - - -def check_password_hash(pwhash: str, password: str) -> bool: - """Check a password against a given salted and hashed password value. - In order to support unsalted legacy passwords this method supports - plain text passwords, md5 and sha1 hashes (both salted and unsalted). - - Returns `True` if the password matched, `False` otherwise. - - :param pwhash: a hashed string like returned by - :func:`generate_password_hash`. - :param password: the plaintext password to compare against the hash. - """ - if pwhash.count("$") < 2: - return False - - method, salt, hashval = pwhash.split("$", 2) - return hmac.compare_digest(_hash_internal(method, salt, password)[0], hashval) - - -def safe_join(directory: str, *pathnames: str) -> t.Optional[str]: - """Safely join zero or more untrusted path components to a base - directory to avoid escaping the base directory. - - :param directory: The trusted base directory. - :param pathnames: The untrusted path components relative to the - base directory. - :return: A safe path, otherwise ``None``. - """ - parts = [directory] - - for filename in pathnames: - if filename != "": - filename = posixpath.normpath(filename) - - if ( - any(sep in filename for sep in _os_alt_seps) - or os.path.isabs(filename) - or filename == ".." - or filename.startswith("../") - ): - return None - - parts.append(filename) - - return posixpath.join(*parts) diff --git a/venv/lib/python3.8/site-packages/werkzeug/serving.py b/venv/lib/python3.8/site-packages/werkzeug/serving.py deleted file mode 100644 index 197b6fb..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/serving.py +++ /dev/null @@ -1,1081 +0,0 @@ -"""A WSGI and HTTP server for use **during development only**. This -server is convenient to use, but is not designed to be particularly -stable, secure, or efficient. Use a dedicate WSGI server and HTTP -server when deploying to production. - -It provides features like interactive debugging and code reloading. Use -``run_simple`` to start the server. Put this in a ``run.py`` script: - -.. 
code-block:: python - - from myapp import create_app - from werkzeug import run_simple -""" -import io -import os -import platform -import signal -import socket -import socketserver -import sys -import typing as t -import warnings -from datetime import datetime as dt -from datetime import timedelta -from datetime import timezone -from http.server import BaseHTTPRequestHandler -from http.server import HTTPServer - -from ._internal import _log -from ._internal import _wsgi_encoding_dance -from .exceptions import InternalServerError -from .urls import uri_to_iri -from .urls import url_parse -from .urls import url_unquote - -try: - import ssl -except ImportError: - - class _SslDummy: - def __getattr__(self, name: str) -> t.Any: - raise RuntimeError("SSL support unavailable") # noqa: B904 - - ssl = _SslDummy() # type: ignore - -_log_add_style = True - -if os.name == "nt": - try: - __import__("colorama") - except ImportError: - _log_add_style = False - -can_fork = hasattr(os, "fork") - -if can_fork: - ForkingMixIn = socketserver.ForkingMixIn -else: - - class ForkingMixIn: # type: ignore - pass - - -try: - af_unix = socket.AF_UNIX -except AttributeError: - af_unix = None # type: ignore - -LISTEN_QUEUE = 128 -can_open_by_fd = not platform.system() == "Windows" and hasattr(socket, "fromfd") - -_TSSLContextArg = t.Optional[ - t.Union["ssl.SSLContext", t.Tuple[str, t.Optional[str]], "te.Literal['adhoc']"] -] - -if t.TYPE_CHECKING: - import typing_extensions as te # noqa: F401 - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - from cryptography.hazmat.primitives.asymmetric.rsa import ( - RSAPrivateKeyWithSerialization, - ) - from cryptography.x509 import Certificate - - -class DechunkedInput(io.RawIOBase): - """An input stream that handles Transfer-Encoding 'chunked'""" - - def __init__(self, rfile: t.IO[bytes]) -> None: - self._rfile = rfile - self._done = False - self._len = 0 - - def readable(self) -> bool: - return True - - def read_chunk_len(self) -> int: - try: - line = self._rfile.readline().decode("latin1") - _len = int(line.strip(), 16) - except ValueError as e: - raise OSError("Invalid chunk header") from e - if _len < 0: - raise OSError("Negative chunk length not allowed") - return _len - - def readinto(self, buf: bytearray) -> int: # type: ignore - read = 0 - while not self._done and read < len(buf): - if self._len == 0: - # This is the first chunk or we fully consumed the previous - # one. Read the next length of the next chunk - self._len = self.read_chunk_len() - - if self._len == 0: - # Found the final chunk of size 0. The stream is now exhausted, - # but there is still a final newline that should be consumed - self._done = True - - if self._len > 0: - # There is data (left) in this chunk, so append it to the - # buffer. If this operation fully consumes the chunk, this will - # reset self._len to 0. - n = min(len(buf), self._len) - - # If (read + chunk size) becomes more than len(buf), buf will - # grow beyond the original size and read more data than - # required. So only read as much data as can fit in buf. - if read + n > len(buf): - buf[read:] = self._rfile.read(len(buf) - read) - self._len -= len(buf) - read - read = len(buf) - else: - buf[read : read + n] = self._rfile.read(n) - self._len -= n - read += n - - if self._len == 0: - # Skip the terminating newline of a chunk that has been fully - # consumed. 
This also applies to the 0-sized final chunk - terminator = self._rfile.readline() - if terminator not in (b"\n", b"\r\n", b"\r"): - raise OSError("Missing chunk terminating newline") - - return read - - -class WSGIRequestHandler(BaseHTTPRequestHandler): - """A request handler that implements WSGI dispatching.""" - - server: "BaseWSGIServer" - - @property - def server_version(self) -> str: # type: ignore - from . import __version__ - - return f"Werkzeug/{__version__}" - - def make_environ(self) -> "WSGIEnvironment": - request_url = url_parse(self.path) - - def shutdown_server() -> None: - warnings.warn( - "The 'environ['werkzeug.server.shutdown']' function is" - " deprecated and will be removed in Werkzeug 2.1.", - stacklevel=2, - ) - self.server.shutdown_signal = True - - url_scheme = "http" if self.server.ssl_context is None else "https" - - if not self.client_address: - self.client_address = ("", 0) - elif isinstance(self.client_address, str): - self.client_address = (self.client_address, 0) - - # If there was no scheme but the path started with two slashes, - # the first segment may have been incorrectly parsed as the - # netloc, prepend it to the path again. - if not request_url.scheme and request_url.netloc: - path_info = f"/{request_url.netloc}{request_url.path}" - else: - path_info = request_url.path - - path_info = url_unquote(path_info) - - environ: "WSGIEnvironment" = { - "wsgi.version": (1, 0), - "wsgi.url_scheme": url_scheme, - "wsgi.input": self.rfile, - "wsgi.errors": sys.stderr, - "wsgi.multithread": self.server.multithread, - "wsgi.multiprocess": self.server.multiprocess, - "wsgi.run_once": False, - "werkzeug.server.shutdown": shutdown_server, - "werkzeug.socket": self.connection, - "SERVER_SOFTWARE": self.server_version, - "REQUEST_METHOD": self.command, - "SCRIPT_NAME": "", - "PATH_INFO": _wsgi_encoding_dance(path_info), - "QUERY_STRING": _wsgi_encoding_dance(request_url.query), - # Non-standard, added by mod_wsgi, uWSGI - "REQUEST_URI": _wsgi_encoding_dance(self.path), - # Non-standard, added by gunicorn - "RAW_URI": _wsgi_encoding_dance(self.path), - "REMOTE_ADDR": self.address_string(), - "REMOTE_PORT": self.port_integer(), - "SERVER_NAME": self.server.server_address[0], - "SERVER_PORT": str(self.server.server_address[1]), - "SERVER_PROTOCOL": self.request_version, - } - - for key, value in self.headers.items(): - key = key.upper().replace("-", "_") - value = value.replace("\r\n", "") - if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): - key = f"HTTP_{key}" - if key in environ: - value = f"{environ[key]},{value}" - environ[key] = value - - if environ.get("HTTP_TRANSFER_ENCODING", "").strip().lower() == "chunked": - environ["wsgi.input_terminated"] = True - environ["wsgi.input"] = DechunkedInput(environ["wsgi.input"]) - - # Per RFC 2616, if the URL is absolute, use that as the host. - # We're using "has a scheme" to indicate an absolute URL. - if request_url.scheme and request_url.netloc: - environ["HTTP_HOST"] = request_url.netloc - - try: - # binary_form=False gives nicer information, but wouldn't be compatible with - # what Nginx or Apache could return. - peer_cert = self.connection.getpeercert(binary_form=True) - if peer_cert is not None: - # Nginx and Apache use PEM format. - environ["SSL_CLIENT_CERT"] = ssl.DER_cert_to_PEM_cert(peer_cert) - except ValueError: - # SSL handshake hasn't finished. - self.server.log("error", "Cannot fetch SSL peer certificate info") - except AttributeError: - # Not using TLS, the socket will not have getpeercert(). 
- pass - - return environ - - def run_wsgi(self) -> None: - if self.headers.get("Expect", "").lower().strip() == "100-continue": - self.wfile.write(b"HTTP/1.1 100 Continue\r\n\r\n") - - self.environ = environ = self.make_environ() - status_set: t.Optional[str] = None - headers_set: t.Optional[t.List[t.Tuple[str, str]]] = None - status_sent: t.Optional[str] = None - headers_sent: t.Optional[t.List[t.Tuple[str, str]]] = None - - def write(data: bytes) -> None: - nonlocal status_sent, headers_sent - assert status_set is not None, "write() before start_response" - assert headers_set is not None, "write() before start_response" - if status_sent is None: - status_sent = status_set - headers_sent = headers_set - try: - code_str, msg = status_sent.split(None, 1) - except ValueError: - code_str, msg = status_sent, "" - code = int(code_str) - self.send_response(code, msg) - header_keys = set() - for key, value in headers_sent: - self.send_header(key, value) - key = key.lower() - header_keys.add(key) - if not ( - "content-length" in header_keys - or environ["REQUEST_METHOD"] == "HEAD" - or code < 200 - or code in (204, 304) - ): - self.close_connection = True - self.send_header("Connection", "close") - if "server" not in header_keys: - self.send_header("Server", self.version_string()) - if "date" not in header_keys: - self.send_header("Date", self.date_time_string()) - self.end_headers() - - assert isinstance(data, bytes), "applications must write bytes" - self.wfile.write(data) - self.wfile.flush() - - def start_response(status, headers, exc_info=None): # type: ignore - nonlocal status_set, headers_set - if exc_info: - try: - if headers_sent: - raise exc_info[1].with_traceback(exc_info[2]) - finally: - exc_info = None - elif headers_set: - raise AssertionError("Headers already set") - status_set = status - headers_set = headers - return write - - def execute(app: "WSGIApplication") -> None: - application_iter = app(environ, start_response) - try: - for data in application_iter: - write(data) - if not headers_sent: - write(b"") - finally: - if hasattr(application_iter, "close"): - application_iter.close() # type: ignore - - try: - execute(self.server.app) - except (ConnectionError, socket.timeout) as e: - self.connection_dropped(e, environ) - except Exception: - if self.server.passthrough_errors: - raise - from .debug.tbtools import get_current_traceback - - traceback = get_current_traceback(ignore_system_exceptions=True) - try: - # if we haven't yet sent the headers but they are set - # we roll back to be able to set them again. - if status_sent is None: - status_set = None - headers_set = None - execute(InternalServerError()) - except Exception: - pass - self.server.log("error", "Error on request:\n%s", traceback.plaintext) - - def handle(self) -> None: - """Handles a request ignoring dropped connections.""" - try: - BaseHTTPRequestHandler.handle(self) - except (ConnectionError, socket.timeout) as e: - self.connection_dropped(e) - except Exception as e: - if self.server.ssl_context is not None and is_ssl_error(e): - self.log_error("SSL error occurred: %s", e) - else: - raise - if self.server.shutdown_signal: - self.initiate_shutdown() - - def initiate_shutdown(self) -> None: - if is_running_from_reloader(): - # Windows does not provide SIGKILL, go with SIGTERM then. 
- sig = getattr(signal, "SIGKILL", signal.SIGTERM) - os.kill(os.getpid(), sig) - - self.server._BaseServer__shutdown_request = True # type: ignore - - def connection_dropped( - self, error: BaseException, environ: t.Optional["WSGIEnvironment"] = None - ) -> None: - """Called if the connection was closed by the client. By default - nothing happens. - """ - - def handle_one_request(self) -> None: - """Handle a single HTTP request.""" - self.raw_requestline = self.rfile.readline() - if not self.raw_requestline: - self.close_connection = True - elif self.parse_request(): - self.run_wsgi() - - def send_response(self, code: int, message: t.Optional[str] = None) -> None: - """Send the response header and log the response code.""" - self.log_request(code) - if message is None: - message = self.responses[code][0] if code in self.responses else "" - if self.request_version != "HTTP/0.9": - hdr = f"{self.protocol_version} {code} {message}\r\n" - self.wfile.write(hdr.encode("ascii")) - - def version_string(self) -> str: - return super().version_string().strip() - - def address_string(self) -> str: - if getattr(self, "environ", None): - return self.environ["REMOTE_ADDR"] # type: ignore - - if not self.client_address: - return "" - - return self.client_address[0] - - def port_integer(self) -> int: - return self.client_address[1] - - def log_request( - self, code: t.Union[int, str] = "-", size: t.Union[int, str] = "-" - ) -> None: - try: - path = uri_to_iri(self.path) - msg = f"{self.command} {path} {self.request_version}" - except AttributeError: - # path isn't set if the requestline was bad - msg = self.requestline - - code = str(code) - - if _log_add_style: - if code[0] == "1": # 1xx - Informational - msg = _ansi_style(msg, "bold") - elif code == "200": # 2xx - Success - pass - elif code == "304": # 304 - Resource Not Modified - msg = _ansi_style(msg, "cyan") - elif code[0] == "3": # 3xx - Redirection - msg = _ansi_style(msg, "green") - elif code == "404": # 404 - Resource Not Found - msg = _ansi_style(msg, "yellow") - elif code[0] == "4": # 4xx - Client Error - msg = _ansi_style(msg, "bold", "red") - else: # 5xx, or any other response - msg = _ansi_style(msg, "bold", "magenta") - - self.log("info", '"%s" %s %s', msg, code, size) - - def log_error(self, format: str, *args: t.Any) -> None: - self.log("error", format, *args) - - def log_message(self, format: str, *args: t.Any) -> None: - self.log("info", format, *args) - - def log(self, type: str, message: str, *args: t.Any) -> None: - _log( - type, - f"{self.address_string()} - - [{self.log_date_time_string()}] {message}\n", - *args, - ) - - -def _ansi_style(value: str, *styles: str) -> str: - codes = { - "bold": 1, - "red": 31, - "green": 32, - "yellow": 33, - "magenta": 35, - "cyan": 36, - } - - for style in styles: - value = f"\x1b[{codes[style]}m{value}" - - return f"{value}\x1b[0m" - - -def generate_adhoc_ssl_pair( - cn: t.Optional[str] = None, -) -> t.Tuple["Certificate", "RSAPrivateKeyWithSerialization"]: - try: - from cryptography import x509 - from cryptography.x509.oid import NameOID - from cryptography.hazmat.backends import default_backend - from cryptography.hazmat.primitives import hashes - from cryptography.hazmat.primitives.asymmetric import rsa - except ImportError: - raise TypeError( - "Using ad-hoc certificates requires the cryptography library." 
- ) from None - - backend = default_backend() - pkey = rsa.generate_private_key( - public_exponent=65537, key_size=2048, backend=backend - ) - - # pretty damn sure that this is not actually accepted by anyone - if cn is None: - cn = "*" - - subject = x509.Name( - [ - x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Dummy Certificate"), - x509.NameAttribute(NameOID.COMMON_NAME, cn), - ] - ) - - backend = default_backend() - cert = ( - x509.CertificateBuilder() - .subject_name(subject) - .issuer_name(subject) - .public_key(pkey.public_key()) - .serial_number(x509.random_serial_number()) - .not_valid_before(dt.now(timezone.utc)) - .not_valid_after(dt.now(timezone.utc) + timedelta(days=365)) - .add_extension(x509.ExtendedKeyUsage([x509.OID_SERVER_AUTH]), critical=False) - .add_extension(x509.SubjectAlternativeName([x509.DNSName(cn)]), critical=False) - .sign(pkey, hashes.SHA256(), backend) - ) - return cert, pkey - - -def make_ssl_devcert( - base_path: str, host: t.Optional[str] = None, cn: t.Optional[str] = None -) -> t.Tuple[str, str]: - """Creates an SSL key for development. This should be used instead of - the ``'adhoc'`` key which generates a new cert on each server start. - It accepts a path for where it should store the key and cert and - either a host or CN. If a host is given it will use the CN - ``*.host/CN=host``. - - For more information see :func:`run_simple`. - - .. versionadded:: 0.9 - - :param base_path: the path to the certificate and key. The extension - ``.crt`` is added for the certificate, ``.key`` is - added for the key. - :param host: the name of the host. This can be used as an alternative - for the `cn`. - :param cn: the `CN` to use. - """ - - if host is not None: - cn = f"*.{host}/CN={host}" - cert, pkey = generate_adhoc_ssl_pair(cn=cn) - - from cryptography.hazmat.primitives import serialization - - cert_file = f"{base_path}.crt" - pkey_file = f"{base_path}.key" - - with open(cert_file, "wb") as f: - f.write(cert.public_bytes(serialization.Encoding.PEM)) - with open(pkey_file, "wb") as f: - f.write( - pkey.private_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PrivateFormat.TraditionalOpenSSL, - encryption_algorithm=serialization.NoEncryption(), - ) - ) - - return cert_file, pkey_file - - -def generate_adhoc_ssl_context() -> "ssl.SSLContext": - """Generates an adhoc SSL context for the development server.""" - import tempfile - import atexit - - cert, pkey = generate_adhoc_ssl_pair() - - from cryptography.hazmat.primitives import serialization - - cert_handle, cert_file = tempfile.mkstemp() - pkey_handle, pkey_file = tempfile.mkstemp() - atexit.register(os.remove, pkey_file) - atexit.register(os.remove, cert_file) - - os.write(cert_handle, cert.public_bytes(serialization.Encoding.PEM)) - os.write( - pkey_handle, - pkey.private_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PrivateFormat.TraditionalOpenSSL, - encryption_algorithm=serialization.NoEncryption(), - ), - ) - - os.close(cert_handle) - os.close(pkey_handle) - ctx = load_ssl_context(cert_file, pkey_file) - return ctx - - -def load_ssl_context( - cert_file: str, pkey_file: t.Optional[str] = None, protocol: t.Optional[int] = None -) -> "ssl.SSLContext": - """Loads SSL context from cert/private key files and optional protocol. - Many parameters are directly taken from the API of - :py:class:`ssl.SSLContext`. - - :param cert_file: Path of the certificate to use. - :param pkey_file: Path of the private key to use. 
If not given, the key - will be obtained from the certificate file. - :param protocol: A ``PROTOCOL`` constant from the :mod:`ssl` module. - Defaults to :data:`ssl.PROTOCOL_TLS_SERVER`. - """ - if protocol is None: - protocol = ssl.PROTOCOL_TLS_SERVER - - ctx = ssl.SSLContext(protocol) - ctx.load_cert_chain(cert_file, pkey_file) - return ctx - - -def is_ssl_error(error: t.Optional[Exception] = None) -> bool: - """Checks if the given error (or the current one) is an SSL error.""" - if error is None: - error = t.cast(Exception, sys.exc_info()[1]) - return isinstance(error, ssl.SSLError) - - -def select_address_family(host: str, port: int) -> socket.AddressFamily: - """Return ``AF_INET4``, ``AF_INET6``, or ``AF_UNIX`` depending on - the host and port.""" - if host.startswith("unix://"): - return socket.AF_UNIX - elif ":" in host and hasattr(socket, "AF_INET6"): - return socket.AF_INET6 - return socket.AF_INET - - -def get_sockaddr( - host: str, port: int, family: socket.AddressFamily -) -> t.Union[t.Tuple[str, int], str]: - """Return a fully qualified socket address that can be passed to - :func:`socket.bind`.""" - if family == af_unix: - return host.split("://", 1)[1] - try: - res = socket.getaddrinfo( - host, port, family, socket.SOCK_STREAM, socket.IPPROTO_TCP - ) - except socket.gaierror: - return host, port - return res[0][4] # type: ignore - - -def get_interface_ip(family: socket.AddressFamily) -> str: - """Get the IP address of an external interface. Used when binding to - 0.0.0.0 or ::1 to show a more useful URL. - - :meta private: - """ - # arbitrary private address - host = "fd31:f903:5ab5:1::1" if family == socket.AF_INET6 else "10.253.155.219" - - with socket.socket(family, socket.SOCK_DGRAM) as s: - try: - s.connect((host, 58162)) - except OSError: - return "::1" if family == socket.AF_INET6 else "127.0.0.1" - - return s.getsockname()[0] # type: ignore - - -class BaseWSGIServer(HTTPServer): - - """Simple single-threaded, single-process WSGI server.""" - - multithread = False - multiprocess = False - request_queue_size = LISTEN_QUEUE - - def __init__( - self, - host: str, - port: int, - app: "WSGIApplication", - handler: t.Optional[t.Type[WSGIRequestHandler]] = None, - passthrough_errors: bool = False, - ssl_context: t.Optional[_TSSLContextArg] = None, - fd: t.Optional[int] = None, - ) -> None: - if handler is None: - handler = WSGIRequestHandler - - self.address_family = select_address_family(host, port) - - if fd is not None: - real_sock = socket.fromfd(fd, self.address_family, socket.SOCK_STREAM) - port = 0 - - server_address = get_sockaddr(host, int(port), self.address_family) - - # remove socket file if it already exists - if self.address_family == af_unix: - server_address = t.cast(str, server_address) - - if os.path.exists(server_address): - os.unlink(server_address) - - super().__init__(server_address, handler) # type: ignore - - self.app = app - self.passthrough_errors = passthrough_errors - self.shutdown_signal = False - self.host = host - self.port = self.socket.getsockname()[1] - - # Patch in the original socket. 
- if fd is not None: - self.socket.close() - self.socket = real_sock - self.server_address = self.socket.getsockname() - - if ssl_context is not None: - if isinstance(ssl_context, tuple): - ssl_context = load_ssl_context(*ssl_context) - if ssl_context == "adhoc": - ssl_context = generate_adhoc_ssl_context() - - self.socket = ssl_context.wrap_socket(self.socket, server_side=True) - self.ssl_context: t.Optional["ssl.SSLContext"] = ssl_context - else: - self.ssl_context = None - - def log(self, type: str, message: str, *args: t.Any) -> None: - _log(type, message, *args) - - def serve_forever(self, poll_interval: float = 0.5) -> None: - self.shutdown_signal = False - try: - super().serve_forever(poll_interval=poll_interval) - except KeyboardInterrupt: - pass - finally: - self.server_close() - - def handle_error(self, request: t.Any, client_address: t.Tuple[str, int]) -> None: - if self.passthrough_errors: - raise - - return super().handle_error(request, client_address) - - -class ThreadedWSGIServer(socketserver.ThreadingMixIn, BaseWSGIServer): - - """A WSGI server that does threading.""" - - multithread = True - daemon_threads = True - - -class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer): - - """A WSGI server that does forking.""" - - multiprocess = True - - def __init__( - self, - host: str, - port: int, - app: "WSGIApplication", - processes: int = 40, - handler: t.Optional[t.Type[WSGIRequestHandler]] = None, - passthrough_errors: bool = False, - ssl_context: t.Optional[_TSSLContextArg] = None, - fd: t.Optional[int] = None, - ) -> None: - if not can_fork: - raise ValueError("Your platform does not support forking.") - BaseWSGIServer.__init__( - self, host, port, app, handler, passthrough_errors, ssl_context, fd - ) - self.max_children = processes - - -def make_server( - host: str, - port: int, - app: "WSGIApplication", - threaded: bool = False, - processes: int = 1, - request_handler: t.Optional[t.Type[WSGIRequestHandler]] = None, - passthrough_errors: bool = False, - ssl_context: t.Optional[_TSSLContextArg] = None, - fd: t.Optional[int] = None, -) -> BaseWSGIServer: - """Create a new server instance that is either threaded, or forks - or just processes one request after another. - """ - if threaded and processes > 1: - raise ValueError("cannot have a multithreaded and multi process server.") - elif threaded: - return ThreadedWSGIServer( - host, port, app, request_handler, passthrough_errors, ssl_context, fd=fd - ) - elif processes > 1: - return ForkingWSGIServer( - host, - port, - app, - processes, - request_handler, - passthrough_errors, - ssl_context, - fd=fd, - ) - else: - return BaseWSGIServer( - host, port, app, request_handler, passthrough_errors, ssl_context, fd=fd - ) - - -def is_running_from_reloader() -> bool: - """Checks if the application is running from within the Werkzeug - reloader subprocess. - - .. 
versionadded:: 0.10 - """ - return os.environ.get("WERKZEUG_RUN_MAIN") == "true" - - -def run_simple( - hostname: str, - port: int, - application: "WSGIApplication", - use_reloader: bool = False, - use_debugger: bool = False, - use_evalex: bool = True, - extra_files: t.Optional[t.Iterable[str]] = None, - exclude_patterns: t.Optional[t.Iterable[str]] = None, - reloader_interval: int = 1, - reloader_type: str = "auto", - threaded: bool = False, - processes: int = 1, - request_handler: t.Optional[t.Type[WSGIRequestHandler]] = None, - static_files: t.Optional[t.Dict[str, t.Union[str, t.Tuple[str, str]]]] = None, - passthrough_errors: bool = False, - ssl_context: t.Optional[_TSSLContextArg] = None, -) -> None: - """Start a WSGI application. Optional features include a reloader, - multithreading and fork support. - - This function has a command-line interface too:: - - python -m werkzeug.serving --help - - .. versionchanged:: 2.0 - Added ``exclude_patterns`` parameter. - - .. versionadded:: 0.5 - `static_files` was added to simplify serving of static files as well - as `passthrough_errors`. - - .. versionadded:: 0.6 - support for SSL was added. - - .. versionadded:: 0.8 - Added support for automatically loading a SSL context from certificate - file and private key. - - .. versionadded:: 0.9 - Added command-line interface. - - .. versionadded:: 0.10 - Improved the reloader and added support for changing the backend - through the `reloader_type` parameter. See :ref:`reloader` - for more information. - - .. versionchanged:: 0.15 - Bind to a Unix socket by passing a path that starts with - ``unix://`` as the ``hostname``. - - :param hostname: The host to bind to, for example ``'localhost'``. - If the value is a path that starts with ``unix://`` it will bind - to a Unix socket instead of a TCP socket.. - :param port: The port for the server. eg: ``8080`` - :param application: the WSGI application to execute - :param use_reloader: should the server automatically restart the python - process if modules were changed? - :param use_debugger: should the werkzeug debugging system be used? - :param use_evalex: should the exception evaluation feature be enabled? - :param extra_files: a list of files the reloader should watch - additionally to the modules. For example configuration - files. - :param exclude_patterns: List of :mod:`fnmatch` patterns to ignore - when running the reloader. For example, ignore cache files that - shouldn't reload when updated. - :param reloader_interval: the interval for the reloader in seconds. - :param reloader_type: the type of reloader to use. The default is - auto detection. Valid values are ``'stat'`` and - ``'watchdog'``. See :ref:`reloader` for more - information. - :param threaded: should the process handle each request in a separate - thread? - :param processes: if greater than 1 then handle each request in a new process - up to this maximum number of concurrent processes. - :param request_handler: optional parameter that can be used to replace - the default one. You can use this to replace it - with a different - :class:`~BaseHTTPServer.BaseHTTPRequestHandler` - subclass. - :param static_files: a list or dict of paths for static files. This works - exactly like :class:`SharedDataMiddleware`, it's actually - just wrapping the application in that middleware before - serving. - :param passthrough_errors: set this to `True` to disable the error catching. - This means that the server will die on errors but - it can be useful to hook debuggers in (pdb etc.) 
- :param ssl_context: an SSL context for the connection. Either an - :class:`ssl.SSLContext`, a tuple in the form - ``(cert_file, pkey_file)``, the string ``'adhoc'`` if - the server should automatically create one, or ``None`` - to disable SSL (which is the default). - """ - if not isinstance(port, int): - raise TypeError("port must be an integer") - if use_debugger: - from .debug import DebuggedApplication - - application = DebuggedApplication(application, use_evalex) - if static_files: - from .middleware.shared_data import SharedDataMiddleware - - application = SharedDataMiddleware(application, static_files) - - def log_startup(sock: socket.socket) -> None: - all_addresses_message = ( - " * Running on all addresses.\n" - " WARNING: This is a development server. Do not use it in" - " a production deployment." - ) - - if sock.family == af_unix: - _log("info", " * Running on %s (Press CTRL+C to quit)", hostname) - else: - if hostname == "0.0.0.0": - _log("warning", all_addresses_message) - display_hostname = get_interface_ip(socket.AF_INET) - elif hostname == "::": - _log("warning", all_addresses_message) - display_hostname = get_interface_ip(socket.AF_INET6) - else: - display_hostname = hostname - - if ":" in display_hostname: - display_hostname = f"[{display_hostname}]" - - _log( - "info", - " * Running on %s://%s:%d/ (Press CTRL+C to quit)", - "http" if ssl_context is None else "https", - display_hostname, - sock.getsockname()[1], - ) - - def inner() -> None: - try: - fd: t.Optional[int] = int(os.environ["WERKZEUG_SERVER_FD"]) - except (LookupError, ValueError): - fd = None - srv = make_server( - hostname, - port, - application, - threaded, - processes, - request_handler, - passthrough_errors, - ssl_context, - fd=fd, - ) - if fd is None: - log_startup(srv.socket) - srv.serve_forever() - - if use_reloader: - # If we're not running already in the subprocess that is the - # reloader we want to open up a socket early to make sure the - # port is actually available. - if not is_running_from_reloader(): - if port == 0 and not can_open_by_fd: - raise ValueError( - "Cannot bind to a random port with enabled " - "reloader if the Python interpreter does " - "not support socket opening by fd." - ) - - # Create and destroy a socket so that any exceptions are - # raised before we spawn a separate Python interpreter and - # lose this ability. - address_family = select_address_family(hostname, port) - server_address = get_sockaddr(hostname, port, address_family) - s = socket.socket(address_family, socket.SOCK_STREAM) - s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - s.bind(server_address) - s.set_inheritable(True) - - # If we can open the socket by file descriptor, then we can just - # reuse this one and our socket will survive the restarts. - if can_open_by_fd: - os.environ["WERKZEUG_SERVER_FD"] = str(s.fileno()) - s.listen(LISTEN_QUEUE) - log_startup(s) - else: - s.close() - if address_family == af_unix: - server_address = t.cast(str, server_address) - _log("info", "Unlinking %s", server_address) - os.unlink(server_address) - - from ._reloader import run_with_reloader as _rwr - - _rwr( - inner, - extra_files=extra_files, - exclude_patterns=exclude_patterns, - interval=reloader_interval, - reloader_type=reloader_type, - ) - else: - inner() - - -def run_with_reloader(*args: t.Any, **kwargs: t.Any) -> None: - """Run a process with the reloader. This is not a public API, do - not use this function. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. 
- """ - from ._reloader import run_with_reloader as _rwr - - warnings.warn( - ( - "'run_with_reloader' is a private API, it will no longer be" - " accessible in Werkzeug 2.1. Use 'run_simple' instead." - ), - DeprecationWarning, - stacklevel=2, - ) - _rwr(*args, **kwargs) - - -def main() -> None: - """A simple command-line interface for :py:func:`run_simple`.""" - import argparse - from .utils import import_string - - _log("warning", "This CLI is deprecated and will be removed in version 2.1.") - - parser = argparse.ArgumentParser( - description="Run the given WSGI application with the development server.", - allow_abbrev=False, - ) - parser.add_argument( - "-b", - "--bind", - dest="address", - help="The hostname:port the app should listen on.", - ) - parser.add_argument( - "-d", - "--debug", - action="store_true", - help="Show the interactive debugger for unhandled exceptions.", - ) - parser.add_argument( - "-r", - "--reload", - action="store_true", - help="Reload the process if modules change.", - ) - parser.add_argument( - "application", help="Application to import and serve, in the form module:app." - ) - args = parser.parse_args() - hostname, port = None, None - - if args.address: - hostname, _, port = args.address.partition(":") - - run_simple( - hostname=hostname or "127.0.0.1", - port=int(port or 5000), - application=import_string(args.application), - use_reloader=args.reload, - use_debugger=args.debug, - ) - - -if __name__ == "__main__": - main() diff --git a/venv/lib/python3.8/site-packages/werkzeug/test.py b/venv/lib/python3.8/site-packages/werkzeug/test.py deleted file mode 100644 index 09cb7e8..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/test.py +++ /dev/null @@ -1,1326 +0,0 @@ -import mimetypes -import sys -import typing as t -import warnings -from collections import defaultdict -from datetime import datetime -from datetime import timedelta -from http.cookiejar import CookieJar -from io import BytesIO -from itertools import chain -from random import random -from tempfile import TemporaryFile -from time import time -from urllib.request import Request as _UrllibRequest - -from ._internal import _get_environ -from ._internal import _make_encode_wrapper -from ._internal import _wsgi_decoding_dance -from ._internal import _wsgi_encoding_dance -from .datastructures import Authorization -from .datastructures import CallbackDict -from .datastructures import CombinedMultiDict -from .datastructures import EnvironHeaders -from .datastructures import FileMultiDict -from .datastructures import Headers -from .datastructures import MultiDict -from .http import dump_cookie -from .http import dump_options_header -from .http import parse_options_header -from .sansio.multipart import Data -from .sansio.multipart import Epilogue -from .sansio.multipart import Field -from .sansio.multipart import File -from .sansio.multipart import MultipartEncoder -from .sansio.multipart import Preamble -from .urls import iri_to_uri -from .urls import url_encode -from .urls import url_fix -from .urls import url_parse -from .urls import url_unparse -from .urls import url_unquote -from .utils import get_content_type -from .wrappers.request import Request -from .wrappers.response import Response -from .wsgi import ClosingIterator -from .wsgi import get_current_url - -if t.TYPE_CHECKING: - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - - -def stream_encode_multipart( - data: t.Mapping[str, t.Any], - use_tempfile: bool = True, - threshold: int = 1024 * 500, - 
boundary: t.Optional[str] = None, - charset: str = "utf-8", -) -> t.Tuple[t.IO[bytes], int, str]: - """Encode a dict of values (either strings or file descriptors or - :class:`FileStorage` objects.) into a multipart encoded string stored - in a file descriptor. - """ - if boundary is None: - boundary = f"---------------WerkzeugFormPart_{time()}{random()}" - - stream: t.IO[bytes] = BytesIO() - total_length = 0 - on_disk = False - - if use_tempfile: - - def write_binary(s: bytes) -> int: - nonlocal stream, total_length, on_disk - - if on_disk: - return stream.write(s) - else: - length = len(s) - - if length + total_length <= threshold: - stream.write(s) - else: - new_stream = t.cast(t.IO[bytes], TemporaryFile("wb+")) - new_stream.write(stream.getvalue()) # type: ignore - new_stream.write(s) - stream = new_stream - on_disk = True - - total_length += length - return length - - else: - write_binary = stream.write - - encoder = MultipartEncoder(boundary.encode()) - write_binary(encoder.send_event(Preamble(data=b""))) - for key, value in _iter_data(data): - reader = getattr(value, "read", None) - if reader is not None: - filename = getattr(value, "filename", getattr(value, "name", None)) - content_type = getattr(value, "content_type", None) - if content_type is None: - content_type = ( - filename - and mimetypes.guess_type(filename)[0] - or "application/octet-stream" - ) - headers = Headers([("Content-Type", content_type)]) - if filename is None: - write_binary(encoder.send_event(Field(name=key, headers=headers))) - else: - write_binary( - encoder.send_event( - File(name=key, filename=filename, headers=headers) - ) - ) - while True: - chunk = reader(16384) - - if not chunk: - break - - write_binary(encoder.send_event(Data(data=chunk, more_data=True))) - else: - if not isinstance(value, str): - value = str(value) - write_binary(encoder.send_event(Field(name=key, headers=Headers()))) - write_binary( - encoder.send_event(Data(data=value.encode(charset), more_data=False)) - ) - - write_binary(encoder.send_event(Epilogue(data=b""))) - - length = stream.tell() - stream.seek(0) - return stream, length, boundary - - -def encode_multipart( - values: t.Mapping[str, t.Any], - boundary: t.Optional[str] = None, - charset: str = "utf-8", -) -> t.Tuple[str, bytes]: - """Like `stream_encode_multipart` but returns a tuple in the form - (``boundary``, ``data``) where data is bytes. - """ - stream, length, boundary = stream_encode_multipart( - values, use_tempfile=False, boundary=boundary, charset=charset - ) - return boundary, stream.read() - - -class _TestCookieHeaders: - """A headers adapter for cookielib""" - - def __init__(self, headers: t.Union[Headers, t.List[t.Tuple[str, str]]]) -> None: - self.headers = headers - - def getheaders(self, name: str) -> t.Iterable[str]: - headers = [] - name = name.lower() - for k, v in self.headers: - if k.lower() == name: - headers.append(v) - return headers - - def get_all( - self, name: str, default: t.Optional[t.Iterable[str]] = None - ) -> t.Iterable[str]: - headers = self.getheaders(name) - - if not headers: - return default # type: ignore - - return headers - - -class _TestCookieResponse: - """Something that looks like a httplib.HTTPResponse, but is actually just an - adapter for our test responses to make them available for cookielib. 
- """ - - def __init__(self, headers: t.Union[Headers, t.List[t.Tuple[str, str]]]) -> None: - self.headers = _TestCookieHeaders(headers) - - def info(self) -> _TestCookieHeaders: - return self.headers - - -class _TestCookieJar(CookieJar): - """A cookielib.CookieJar modified to inject and read cookie headers from - and to wsgi environments, and wsgi application responses. - """ - - def inject_wsgi(self, environ: "WSGIEnvironment") -> None: - """Inject the cookies as client headers into the server's wsgi - environment. - """ - cvals = [f"{c.name}={c.value}" for c in self] - - if cvals: - environ["HTTP_COOKIE"] = "; ".join(cvals) - else: - environ.pop("HTTP_COOKIE", None) - - def extract_wsgi( - self, - environ: "WSGIEnvironment", - headers: t.Union[Headers, t.List[t.Tuple[str, str]]], - ) -> None: - """Extract the server's set-cookie headers as cookies into the - cookie jar. - """ - self.extract_cookies( - _TestCookieResponse(headers), # type: ignore - _UrllibRequest(get_current_url(environ)), - ) - - -def _iter_data(data: t.Mapping[str, t.Any]) -> t.Iterator[t.Tuple[str, t.Any]]: - """Iterate over a mapping that might have a list of values, yielding - all key, value pairs. Almost like iter_multi_items but only allows - lists, not tuples, of values so tuples can be used for files. - """ - if isinstance(data, MultiDict): - yield from data.items(multi=True) - else: - for key, value in data.items(): - if isinstance(value, list): - for v in value: - yield key, v - else: - yield key, value - - -_TAnyMultiDict = t.TypeVar("_TAnyMultiDict", bound=MultiDict) - - -class EnvironBuilder: - """This class can be used to conveniently create a WSGI environment - for testing purposes. It can be used to quickly create WSGI environments - or request objects from arbitrary data. - - The signature of this class is also used in some other places as of - Werkzeug 0.5 (:func:`create_environ`, :meth:`Response.from_values`, - :meth:`Client.open`). Because of this most of the functionality is - available through the constructor alone. - - Files and regular form data can be manipulated independently of each - other with the :attr:`form` and :attr:`files` attributes, but are - passed with the same argument to the constructor: `data`. - - `data` can be any of these values: - - - a `str` or `bytes` object: The object is converted into an - :attr:`input_stream`, the :attr:`content_length` is set and you have to - provide a :attr:`content_type`. - - a `dict` or :class:`MultiDict`: The keys have to be strings. The values - have to be either any of the following objects, or a list of any of the - following objects: - - - a :class:`file`-like object: These are converted into - :class:`FileStorage` objects automatically. - - a `tuple`: The :meth:`~FileMultiDict.add_file` method is called - with the key and the unpacked `tuple` items as positional - arguments. - - a `str`: The string is set as form data for the associated key. - - a file-like object: The object content is loaded in memory and then - handled like a regular `str` or a `bytes`. - - :param path: the path of the request. In the WSGI environment this will - end up as `PATH_INFO`. If the `query_string` is not defined - and there is a question mark in the `path` everything after - it is used as query string. - :param base_url: the base URL is a URL that is used to extract the WSGI - URL scheme, host (server name + server port) and the - script root (`SCRIPT_NAME`). - :param query_string: an optional string or dict with URL parameters. 
- :param method: the HTTP method to use, defaults to `GET`. - :param input_stream: an optional input stream. Do not specify this and - `data`. As soon as an input stream is set you can't - modify :attr:`args` and :attr:`files` unless you - set the :attr:`input_stream` to `None` again. - :param content_type: The content type for the request. As of 0.5 you - don't have to provide this when specifying files - and form data via `data`. - :param content_length: The content length for the request. You don't - have to specify this when providing data via - `data`. - :param errors_stream: an optional error stream that is used for - `wsgi.errors`. Defaults to :data:`stderr`. - :param multithread: controls `wsgi.multithread`. Defaults to `False`. - :param multiprocess: controls `wsgi.multiprocess`. Defaults to `False`. - :param run_once: controls `wsgi.run_once`. Defaults to `False`. - :param headers: an optional list or :class:`Headers` object of headers. - :param data: a string or dict of form data or a file-object. - See explanation above. - :param json: An object to be serialized and assigned to ``data``. - Defaults the content type to ``"application/json"``. - Serialized with the function assigned to :attr:`json_dumps`. - :param environ_base: an optional dict of environment defaults. - :param environ_overrides: an optional dict of environment overrides. - :param charset: the charset used to encode string data. - :param auth: An authorization object to use for the - ``Authorization`` header value. A ``(username, password)`` tuple - is a shortcut for ``Basic`` authorization. - - .. versionchanged:: 2.0 - ``REQUEST_URI`` and ``RAW_URI`` is the full raw URI including - the query string, not only the path. - - .. versionchanged:: 2.0 - The default :attr:`request_class` is ``Request`` instead of - ``BaseRequest``. - - .. versionadded:: 2.0 - Added the ``auth`` parameter. - - .. versionadded:: 0.15 - The ``json`` param and :meth:`json_dumps` method. - - .. versionadded:: 0.15 - The environ has keys ``REQUEST_URI`` and ``RAW_URI`` containing - the path before perecent-decoding. This is not part of the WSGI - PEP, but many WSGI servers include it. - - .. versionchanged:: 0.6 - ``path`` and ``base_url`` can now be unicode strings that are - encoded with :func:`iri_to_uri`. - """ - - #: the server protocol to use. defaults to HTTP/1.1 - server_protocol = "HTTP/1.1" - - #: the wsgi version to use. defaults to (1, 0) - wsgi_version = (1, 0) - - #: The default request class used by :meth:`get_request`. - request_class = Request - - import json - - #: The serialization function used when ``json`` is passed. 
- json_dumps = staticmethod(json.dumps) - del json - - _args: t.Optional[MultiDict] - _query_string: t.Optional[str] - _input_stream: t.Optional[t.IO[bytes]] - _form: t.Optional[MultiDict] - _files: t.Optional[FileMultiDict] - - def __init__( - self, - path: str = "/", - base_url: t.Optional[str] = None, - query_string: t.Optional[t.Union[t.Mapping[str, str], str]] = None, - method: str = "GET", - input_stream: t.Optional[t.IO[bytes]] = None, - content_type: t.Optional[str] = None, - content_length: t.Optional[int] = None, - errors_stream: t.Optional[t.IO[str]] = None, - multithread: bool = False, - multiprocess: bool = False, - run_once: bool = False, - headers: t.Optional[t.Union[Headers, t.Iterable[t.Tuple[str, str]]]] = None, - data: t.Optional[ - t.Union[t.IO[bytes], str, bytes, t.Mapping[str, t.Any]] - ] = None, - environ_base: t.Optional[t.Mapping[str, t.Any]] = None, - environ_overrides: t.Optional[t.Mapping[str, t.Any]] = None, - charset: str = "utf-8", - mimetype: t.Optional[str] = None, - json: t.Optional[t.Mapping[str, t.Any]] = None, - auth: t.Optional[t.Union[Authorization, t.Tuple[str, str]]] = None, - ) -> None: - path_s = _make_encode_wrapper(path) - if query_string is not None and path_s("?") in path: - raise ValueError("Query string is defined in the path and as an argument") - request_uri = url_parse(path) - if query_string is None and path_s("?") in path: - query_string = request_uri.query - self.charset = charset - self.path = iri_to_uri(request_uri.path) - self.request_uri = path - if base_url is not None: - base_url = url_fix(iri_to_uri(base_url, charset), charset) - self.base_url = base_url # type: ignore - if isinstance(query_string, (bytes, str)): - self.query_string = query_string - else: - if query_string is None: - query_string = MultiDict() - elif not isinstance(query_string, MultiDict): - query_string = MultiDict(query_string) - self.args = query_string - self.method = method - if headers is None: - headers = Headers() - elif not isinstance(headers, Headers): - headers = Headers(headers) - self.headers = headers - if content_type is not None: - self.content_type = content_type - if errors_stream is None: - errors_stream = sys.stderr - self.errors_stream = errors_stream - self.multithread = multithread - self.multiprocess = multiprocess - self.run_once = run_once - self.environ_base = environ_base - self.environ_overrides = environ_overrides - self.input_stream = input_stream - self.content_length = content_length - self.closed = False - - if auth is not None: - if isinstance(auth, tuple): - auth = Authorization( - "basic", {"username": auth[0], "password": auth[1]} - ) - - self.headers.set("Authorization", auth.to_header()) - - if json is not None: - if data is not None: - raise TypeError("can't provide both json and data") - - data = self.json_dumps(json) - - if self.content_type is None: - self.content_type = "application/json" - - if data: - if input_stream is not None: - raise TypeError("can't provide input stream and data") - if hasattr(data, "read"): - data = data.read() # type: ignore - if isinstance(data, str): - data = data.encode(self.charset) - if isinstance(data, bytes): - self.input_stream = BytesIO(data) - if self.content_length is None: - self.content_length = len(data) - else: - for key, value in _iter_data(data): # type: ignore - if isinstance(value, (tuple, dict)) or hasattr(value, "read"): - self._add_file_from_data(key, value) - else: - self.form.setlistdefault(key).append(value) - - if mimetype is not None: - self.mimetype = mimetype - - 
@classmethod - def from_environ( - cls, environ: "WSGIEnvironment", **kwargs: t.Any - ) -> "EnvironBuilder": - """Turn an environ dict back into a builder. Any extra kwargs - override the args extracted from the environ. - - .. versionchanged:: 2.0 - Path and query values are passed through the WSGI decoding - dance to avoid double encoding. - - .. versionadded:: 0.15 - """ - headers = Headers(EnvironHeaders(environ)) - out = { - "path": _wsgi_decoding_dance(environ["PATH_INFO"]), - "base_url": cls._make_base_url( - environ["wsgi.url_scheme"], - headers.pop("Host"), - _wsgi_decoding_dance(environ["SCRIPT_NAME"]), - ), - "query_string": _wsgi_decoding_dance(environ["QUERY_STRING"]), - "method": environ["REQUEST_METHOD"], - "input_stream": environ["wsgi.input"], - "content_type": headers.pop("Content-Type", None), - "content_length": headers.pop("Content-Length", None), - "errors_stream": environ["wsgi.errors"], - "multithread": environ["wsgi.multithread"], - "multiprocess": environ["wsgi.multiprocess"], - "run_once": environ["wsgi.run_once"], - "headers": headers, - } - out.update(kwargs) - return cls(**out) - - def _add_file_from_data( - self, - key: str, - value: t.Union[ - t.IO[bytes], t.Tuple[t.IO[bytes], str], t.Tuple[t.IO[bytes], str, str] - ], - ) -> None: - """Called in the EnvironBuilder to add files from the data dict.""" - if isinstance(value, tuple): - self.files.add_file(key, *value) - else: - self.files.add_file(key, value) - - @staticmethod - def _make_base_url(scheme: str, host: str, script_root: str) -> str: - return url_unparse((scheme, host, script_root, "", "")).rstrip("/") + "/" - - @property - def base_url(self) -> str: - """The base URL is used to extract the URL scheme, host name, - port, and root path. - """ - return self._make_base_url(self.url_scheme, self.host, self.script_root) - - @base_url.setter - def base_url(self, value: t.Optional[str]) -> None: - if value is None: - scheme = "http" - netloc = "localhost" - script_root = "" - else: - scheme, netloc, script_root, qs, anchor = url_parse(value) - if qs or anchor: - raise ValueError("base url must not contain a query string or fragment") - self.script_root = script_root.rstrip("/") - self.host = netloc - self.url_scheme = scheme - - @property - def content_type(self) -> t.Optional[str]: - """The content type for the request. Reflected from and to - the :attr:`headers`. Do not set if you set :attr:`files` or - :attr:`form` for auto detection. - """ - ct = self.headers.get("Content-Type") - if ct is None and not self._input_stream: - if self._files: - return "multipart/form-data" - if self._form: - return "application/x-www-form-urlencoded" - return None - return ct - - @content_type.setter - def content_type(self, value: t.Optional[str]) -> None: - if value is None: - self.headers.pop("Content-Type", None) - else: - self.headers["Content-Type"] = value - - @property - def mimetype(self) -> t.Optional[str]: - """The mimetype (content type without charset etc.) - - .. versionadded:: 0.14 - """ - ct = self.content_type - return ct.split(";")[0].strip() if ct else None - - @mimetype.setter - def mimetype(self, value: str) -> None: - self.content_type = get_content_type(value, self.charset) - - @property - def mimetype_params(self) -> t.Mapping[str, str]: - """The mimetype parameters as dict. For example if the - content type is ``text/html; charset=utf-8`` the params would be - ``{'charset': 'utf-8'}``. - - .. 
versionadded:: 0.14 - """ - - def on_update(d: CallbackDict) -> None: - self.headers["Content-Type"] = dump_options_header(self.mimetype, d) - - d = parse_options_header(self.headers.get("content-type", ""))[1] - return CallbackDict(d, on_update) - - @property - def content_length(self) -> t.Optional[int]: - """The content length as integer. Reflected from and to the - :attr:`headers`. Do not set if you set :attr:`files` or - :attr:`form` for auto detection. - """ - return self.headers.get("Content-Length", type=int) - - @content_length.setter - def content_length(self, value: t.Optional[int]) -> None: - if value is None: - self.headers.pop("Content-Length", None) - else: - self.headers["Content-Length"] = str(value) - - def _get_form(self, name: str, storage: t.Type[_TAnyMultiDict]) -> _TAnyMultiDict: - """Common behavior for getting the :attr:`form` and - :attr:`files` properties. - - :param name: Name of the internal cached attribute. - :param storage: Storage class used for the data. - """ - if self.input_stream is not None: - raise AttributeError("an input stream is defined") - - rv = getattr(self, name) - - if rv is None: - rv = storage() - setattr(self, name, rv) - - return rv # type: ignore - - def _set_form(self, name: str, value: MultiDict) -> None: - """Common behavior for setting the :attr:`form` and - :attr:`files` properties. - - :param name: Name of the internal cached attribute. - :param value: Value to assign to the attribute. - """ - self._input_stream = None - setattr(self, name, value) - - @property - def form(self) -> MultiDict: - """A :class:`MultiDict` of form values.""" - return self._get_form("_form", MultiDict) - - @form.setter - def form(self, value: MultiDict) -> None: - self._set_form("_form", value) - - @property - def files(self) -> FileMultiDict: - """A :class:`FileMultiDict` of uploaded files. Use - :meth:`~FileMultiDict.add_file` to add new files. - """ - return self._get_form("_files", FileMultiDict) - - @files.setter - def files(self, value: FileMultiDict) -> None: - self._set_form("_files", value) - - @property - def input_stream(self) -> t.Optional[t.IO[bytes]]: - """An optional input stream. This is mutually exclusive with - setting :attr:`form` and :attr:`files`, setting it will clear - those. Do not provide this if the method is not ``POST`` or - another method that has a body. - """ - return self._input_stream - - @input_stream.setter - def input_stream(self, value: t.Optional[t.IO[bytes]]) -> None: - self._input_stream = value - self._form = None - self._files = None - - @property - def query_string(self) -> str: - """The query string. If you set this to a string - :attr:`args` will no longer be available. 
- """ - if self._query_string is None: - if self._args is not None: - return url_encode(self._args, charset=self.charset) - return "" - return self._query_string - - @query_string.setter - def query_string(self, value: t.Optional[str]) -> None: - self._query_string = value - self._args = None - - @property - def args(self) -> MultiDict: - """The URL arguments as :class:`MultiDict`.""" - if self._query_string is not None: - raise AttributeError("a query string is defined") - if self._args is None: - self._args = MultiDict() - return self._args - - @args.setter - def args(self, value: t.Optional[MultiDict]) -> None: - self._query_string = None - self._args = value - - @property - def server_name(self) -> str: - """The server name (read-only, use :attr:`host` to set)""" - return self.host.split(":", 1)[0] - - @property - def server_port(self) -> int: - """The server port as integer (read-only, use :attr:`host` to set)""" - pieces = self.host.split(":", 1) - if len(pieces) == 2 and pieces[1].isdigit(): - return int(pieces[1]) - if self.url_scheme == "https": - return 443 - return 80 - - def __del__(self) -> None: - try: - self.close() - except Exception: - pass - - def close(self) -> None: - """Closes all files. If you put real :class:`file` objects into the - :attr:`files` dict you can call this method to automatically close - them all in one go. - """ - if self.closed: - return - try: - files = self.files.values() - except AttributeError: - files = () # type: ignore - for f in files: - try: - f.close() - except Exception: - pass - self.closed = True - - def get_environ(self) -> "WSGIEnvironment": - """Return the built environ. - - .. versionchanged:: 0.15 - The content type and length headers are set based on - input stream detection. Previously this only set the WSGI - keys. 
- """ - input_stream = self.input_stream - content_length = self.content_length - - mimetype = self.mimetype - content_type = self.content_type - - if input_stream is not None: - start_pos = input_stream.tell() - input_stream.seek(0, 2) - end_pos = input_stream.tell() - input_stream.seek(start_pos) - content_length = end_pos - start_pos - elif mimetype == "multipart/form-data": - input_stream, content_length, boundary = stream_encode_multipart( - CombinedMultiDict([self.form, self.files]), charset=self.charset - ) - content_type = f'{mimetype}; boundary="{boundary}"' - elif mimetype == "application/x-www-form-urlencoded": - form_encoded = url_encode(self.form, charset=self.charset).encode("ascii") - content_length = len(form_encoded) - input_stream = BytesIO(form_encoded) - else: - input_stream = BytesIO() - - result: "WSGIEnvironment" = {} - if self.environ_base: - result.update(self.environ_base) - - def _path_encode(x: str) -> str: - return _wsgi_encoding_dance(url_unquote(x, self.charset), self.charset) - - raw_uri = _wsgi_encoding_dance(self.request_uri, self.charset) - result.update( - { - "REQUEST_METHOD": self.method, - "SCRIPT_NAME": _path_encode(self.script_root), - "PATH_INFO": _path_encode(self.path), - "QUERY_STRING": _wsgi_encoding_dance(self.query_string, self.charset), - # Non-standard, added by mod_wsgi, uWSGI - "REQUEST_URI": raw_uri, - # Non-standard, added by gunicorn - "RAW_URI": raw_uri, - "SERVER_NAME": self.server_name, - "SERVER_PORT": str(self.server_port), - "HTTP_HOST": self.host, - "SERVER_PROTOCOL": self.server_protocol, - "wsgi.version": self.wsgi_version, - "wsgi.url_scheme": self.url_scheme, - "wsgi.input": input_stream, - "wsgi.errors": self.errors_stream, - "wsgi.multithread": self.multithread, - "wsgi.multiprocess": self.multiprocess, - "wsgi.run_once": self.run_once, - } - ) - - headers = self.headers.copy() - - if content_type is not None: - result["CONTENT_TYPE"] = content_type - headers.set("Content-Type", content_type) - - if content_length is not None: - result["CONTENT_LENGTH"] = str(content_length) - headers.set("Content-Length", content_length) - - combined_headers = defaultdict(list) - - for key, value in headers.to_wsgi_list(): - combined_headers[f"HTTP_{key.upper().replace('-', '_')}"].append(value) - - for key, values in combined_headers.items(): - result[key] = ", ".join(values) - - if self.environ_overrides: - result.update(self.environ_overrides) - - return result - - def get_request(self, cls: t.Optional[t.Type[Request]] = None) -> Request: - """Returns a request with the data. If the request class is not - specified :attr:`request_class` is used. - - :param cls: The request wrapper to use. - """ - if cls is None: - cls = self.request_class - - return cls(self.get_environ()) - - -class ClientRedirectError(Exception): - """If a redirect loop is detected when using follow_redirects=True with - the :cls:`Client`, then this exception is raised. - """ - - -class Client: - """This class allows you to send requests to a wrapped application. - - The use_cookies parameter indicates whether cookies should be stored and - sent for subsequent requests. This is True by default, but passing False - will disable this behaviour. - - If you want to request some subdomain of your application you may set - `allow_subdomain_redirects` to `True` as if not no external redirects - are allowed. - - .. versionchanged:: 2.0 - ``response_wrapper`` is always a subclass of - :class:``TestResponse``. - - .. versionchanged:: 0.5 - Added the ``use_cookies`` parameter. 
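# Illustrative sketch (not part of the deleted file): the environ produced by
# get_environ() above for a urlencoded POST built from the ``data`` argument.
from werkzeug.test import EnvironBuilder

env = EnvironBuilder(path="/login", method="POST", data={"user": "alice"}).get_environ()
assert env["REQUEST_METHOD"] == "POST"
assert env["PATH_INFO"] == "/login"
assert env["CONTENT_TYPE"] == "application/x-www-form-urlencoded"
assert env["CONTENT_LENGTH"] == str(len(b"user=alice"))
assert env["wsgi.url_scheme"] == "http"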
- """ - - def __init__( - self, - application: "WSGIApplication", - response_wrapper: t.Optional[t.Type["Response"]] = None, - use_cookies: bool = True, - allow_subdomain_redirects: bool = False, - ) -> None: - self.application = application - - if response_wrapper in {None, Response}: - response_wrapper = TestResponse - elif not isinstance(response_wrapper, TestResponse): - response_wrapper = type( - "WrapperTestResponse", - (TestResponse, response_wrapper), # type: ignore - {}, - ) - - self.response_wrapper = t.cast(t.Type["TestResponse"], response_wrapper) - - if use_cookies: - self.cookie_jar: t.Optional[_TestCookieJar] = _TestCookieJar() - else: - self.cookie_jar = None - - self.allow_subdomain_redirects = allow_subdomain_redirects - - def set_cookie( - self, - server_name: str, - key: str, - value: str = "", - max_age: t.Optional[t.Union[timedelta, int]] = None, - expires: t.Optional[t.Union[str, datetime, int, float]] = None, - path: str = "/", - domain: t.Optional[str] = None, - secure: bool = False, - httponly: bool = False, - samesite: t.Optional[str] = None, - charset: str = "utf-8", - ) -> None: - """Sets a cookie in the client's cookie jar. The server name - is required and has to match the one that is also passed to - the open call. - """ - assert self.cookie_jar is not None, "cookies disabled" - header = dump_cookie( - key, - value, - max_age, - expires, - path, - domain, - secure, - httponly, - charset, - samesite=samesite, - ) - environ = create_environ(path, base_url=f"http://{server_name}") - headers = [("Set-Cookie", header)] - self.cookie_jar.extract_wsgi(environ, headers) - - def delete_cookie( - self, - server_name: str, - key: str, - path: str = "/", - domain: t.Optional[str] = None, - secure: bool = False, - httponly: bool = False, - samesite: t.Optional[str] = None, - ) -> None: - """Deletes a cookie in the test client.""" - self.set_cookie( - server_name, - key, - expires=0, - max_age=0, - path=path, - domain=domain, - secure=secure, - httponly=httponly, - samesite=samesite, - ) - - def run_wsgi_app( - self, environ: "WSGIEnvironment", buffered: bool = False - ) -> t.Tuple[t.Iterable[bytes], str, Headers]: - """Runs the wrapped WSGI app with the given environment. - - :meta private: - """ - if self.cookie_jar is not None: - self.cookie_jar.inject_wsgi(environ) - - rv = run_wsgi_app(self.application, environ, buffered=buffered) - - if self.cookie_jar is not None: - self.cookie_jar.extract_wsgi(environ, rv[2]) - - return rv - - def resolve_redirect( - self, response: "TestResponse", buffered: bool = False - ) -> "TestResponse": - """Perform a new request to the location given by the redirect - response to the previous request. - - :meta private: - """ - scheme, netloc, path, qs, anchor = url_parse(response.location) - builder = EnvironBuilder.from_environ(response.request.environ, query_string=qs) - - to_name_parts = netloc.split(":", 1)[0].split(".") - from_name_parts = builder.server_name.split(".") - - if to_name_parts != [""]: - # The new location has a host, use it for the base URL. - builder.url_scheme = scheme - builder.host = netloc - else: - # A local redirect with autocorrect_location_header=False - # doesn't have a host, so use the request's host. - to_name_parts = from_name_parts - - # Explain why a redirect to a different server name won't be followed. 
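# Illustrative sketch (not part of the deleted file): the cookie helpers above
# driven against a tiny, hypothetical WSGI app that echoes the Cookie header.
from werkzeug.test import Client
from werkzeug.wrappers import Response

def echo_cookie_app(environ, start_response):
    body = environ.get("HTTP_COOKIE", "").encode()
    return Response(body, mimetype="text/plain")(environ, start_response)

client = Client(echo_cookie_app)
client.set_cookie("localhost", "session", "abc123")
assert b"session=abc123" in client.get("/").data
client.delete_cookie("localhost", "session")
assert client.get("/").data == b""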
- if to_name_parts != from_name_parts: - if to_name_parts[-len(from_name_parts) :] == from_name_parts: - if not self.allow_subdomain_redirects: - raise RuntimeError("Following subdomain redirects is not enabled.") - else: - raise RuntimeError("Following external redirects is not supported.") - - path_parts = path.split("/") - root_parts = builder.script_root.split("/") - - if path_parts[: len(root_parts)] == root_parts: - # Strip the script root from the path. - builder.path = path[len(builder.script_root) :] - else: - # The new location is not under the script root, so use the - # whole path and clear the previous root. - builder.path = path - builder.script_root = "" - - # Only 307 and 308 preserve all of the original request. - if response.status_code not in {307, 308}: - # HEAD is preserved, everything else becomes GET. - if builder.method != "HEAD": - builder.method = "GET" - - # Clear the body and the headers that describe it. - - if builder.input_stream is not None: - builder.input_stream.close() - builder.input_stream = None - - builder.content_type = None - builder.content_length = None - builder.headers.pop("Transfer-Encoding", None) - - return self.open(builder, buffered=buffered) - - def open( - self, - *args: t.Any, - as_tuple: bool = False, - buffered: bool = False, - follow_redirects: bool = False, - **kwargs: t.Any, - ) -> "TestResponse": - """Generate an environ dict from the given arguments, make a - request to the application using it, and return the response. - - :param args: Passed to :class:`EnvironBuilder` to create the - environ for the request. If a single arg is passed, it can - be an existing :class:`EnvironBuilder` or an environ dict. - :param buffered: Convert the iterator returned by the app into - a list. If the iterator has a ``close()`` method, it is - called automatically. - :param follow_redirects: Make additional requests to follow HTTP - redirects until a non-redirect status is returned. - :attr:`TestResponse.history` lists the intermediate - responses. - - .. versionchanged:: 2.0 - ``as_tuple`` is deprecated and will be removed in Werkzeug - 2.1. Use :attr:`TestResponse.request` and - ``request.environ`` instead. - - .. versionchanged:: 2.0 - The request input stream is closed when calling - ``response.close()``. Input streams for redirects are - automatically closed. - - .. versionchanged:: 0.5 - If a dict is provided as file in the dict for the ``data`` - parameter the content type has to be called ``content_type`` - instead of ``mimetype``. This change was made for - consistency with :class:`werkzeug.FileWrapper`. - - .. versionchanged:: 0.5 - Added the ``follow_redirects`` parameter. - """ - request: t.Optional["Request"] = None - - if not kwargs and len(args) == 1: - arg = args[0] - - if isinstance(arg, EnvironBuilder): - request = arg.get_request() - elif isinstance(arg, dict): - request = EnvironBuilder.from_environ(arg).get_request() - elif isinstance(arg, Request): - request = arg - - if request is None: - builder = EnvironBuilder(*args, **kwargs) - - try: - request = builder.get_request() - finally: - builder.close() - - response = self.run_wsgi_app(request.environ, buffered=buffered) - response = self.response_wrapper(*response, request=request) - - redirects = set() - history: t.List["TestResponse"] = [] - - while follow_redirects and response.status_code in { - 301, - 302, - 303, - 305, - 307, - 308, - }: - # Exhaust intermediate response bodies to ensure middleware - # that returns an iterator runs any cleanup code. 
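# Illustrative sketch (not part of the deleted file): the redirect resolution
# above as seen through follow_redirects; the redirecting app is hypothetical.
from werkzeug.test import Client
from werkzeug.utils import redirect
from werkzeug.wrappers import Response

def app(environ, start_response):
    if environ["PATH_INFO"] == "/old":
        return redirect("/new")(environ, start_response)
    return Response(b"moved here")(environ, start_response)

client = Client(app)
rv = client.get("/old", follow_redirects=True)
assert rv.status_code == 200 and rv.data == b"moved here"
assert [r.status_code for r in rv.history] == [302]   # the intermediate redirect
assert rv.request.path == "/new"                      # environ of the final request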
- if not buffered: - response.make_sequence() - response.close() - - new_redirect_entry = (response.location, response.status_code) - - if new_redirect_entry in redirects: - raise ClientRedirectError( - f"Loop detected: A {response.status_code} redirect" - f" to {response.location} was already made." - ) - - redirects.add(new_redirect_entry) - response.history = tuple(history) - history.append(response) - response = self.resolve_redirect(response, buffered=buffered) - else: - # This is the final request after redirects, or not - # following redirects. - response.history = tuple(history) - # Close the input stream when closing the response, in case - # the input is an open temporary file. - response.call_on_close(request.input_stream.close) - - if as_tuple: - warnings.warn( - "'as_tuple' is deprecated and will be removed in" - " Werkzeug 2.1. Access 'response.request.environ'" - " instead.", - DeprecationWarning, - stacklevel=2, - ) - return request.environ, response # type: ignore - - return response - - def get(self, *args: t.Any, **kw: t.Any) -> "TestResponse": - """Call :meth:`open` with ``method`` set to ``GET``.""" - kw["method"] = "GET" - return self.open(*args, **kw) - - def post(self, *args: t.Any, **kw: t.Any) -> "TestResponse": - """Call :meth:`open` with ``method`` set to ``POST``.""" - kw["method"] = "POST" - return self.open(*args, **kw) - - def put(self, *args: t.Any, **kw: t.Any) -> "TestResponse": - """Call :meth:`open` with ``method`` set to ``PUT``.""" - kw["method"] = "PUT" - return self.open(*args, **kw) - - def delete(self, *args: t.Any, **kw: t.Any) -> "TestResponse": - """Call :meth:`open` with ``method`` set to ``DELETE``.""" - kw["method"] = "DELETE" - return self.open(*args, **kw) - - def patch(self, *args: t.Any, **kw: t.Any) -> "TestResponse": - """Call :meth:`open` with ``method`` set to ``PATCH``.""" - kw["method"] = "PATCH" - return self.open(*args, **kw) - - def options(self, *args: t.Any, **kw: t.Any) -> "TestResponse": - """Call :meth:`open` with ``method`` set to ``OPTIONS``.""" - kw["method"] = "OPTIONS" - return self.open(*args, **kw) - - def head(self, *args: t.Any, **kw: t.Any) -> "TestResponse": - """Call :meth:`open` with ``method`` set to ``HEAD``.""" - kw["method"] = "HEAD" - return self.open(*args, **kw) - - def trace(self, *args: t.Any, **kw: t.Any) -> "TestResponse": - """Call :meth:`open` with ``method`` set to ``TRACE``.""" - kw["method"] = "TRACE" - return self.open(*args, **kw) - - def __repr__(self) -> str: - return f"<{type(self).__name__} {self.application!r}>" - - -def create_environ(*args: t.Any, **kwargs: t.Any) -> "WSGIEnvironment": - """Create a new WSGI environ dict based on the values passed. The first - parameter should be the path of the request which defaults to '/'. The - second one can either be an absolute path (in that case the host is - localhost:80) or a full path to the request with scheme, netloc port and - the path to the script. - - This accepts the same arguments as the :class:`EnvironBuilder` - constructor. - - .. versionchanged:: 0.5 - This function is now a thin wrapper over :class:`EnvironBuilder` which - was added in 0.5. The `headers`, `environ_base`, `environ_overrides` - and `charset` parameters were added. 
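# Illustrative sketch (not part of the deleted file): the verb helpers above
# simply preset ``method`` before delegating to open(); ``api`` is a
# hypothetical app.
from werkzeug.test import Client
from werkzeug.wrappers import Request, Response

@Request.application
def api(request):
    return Response(f"{request.method} {request.path}")

c = Client(api)
assert c.get("/ping").data == b"GET /ping"
assert c.post("/items", data={"name": "x"}).data == b"POST /items"
assert c.delete("/items/1").data == b"DELETE /items/1"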
- """ - builder = EnvironBuilder(*args, **kwargs) - - try: - return builder.get_environ() - finally: - builder.close() - - -def run_wsgi_app( - app: "WSGIApplication", environ: "WSGIEnvironment", buffered: bool = False -) -> t.Tuple[t.Iterable[bytes], str, Headers]: - """Return a tuple in the form (app_iter, status, headers) of the - application output. This works best if you pass it an application that - returns an iterator all the time. - - Sometimes applications may use the `write()` callable returned - by the `start_response` function. This tries to resolve such edge - cases automatically. But if you don't get the expected output you - should set `buffered` to `True` which enforces buffering. - - If passed an invalid WSGI application the behavior of this function is - undefined. Never pass non-conforming WSGI applications to this function. - - :param app: the application to execute. - :param buffered: set to `True` to enforce buffering. - :return: tuple in the form ``(app_iter, status, headers)`` - """ - # Copy environ to ensure any mutations by the app (ProxyFix, for - # example) don't affect subsequent requests (such as redirects). - environ = _get_environ(environ).copy() - status: str - response: t.Optional[t.Tuple[str, t.List[t.Tuple[str, str]]]] = None - buffer: t.List[bytes] = [] - - def start_response(status, headers, exc_info=None): # type: ignore - nonlocal response - - if exc_info: - try: - raise exc_info[1].with_traceback(exc_info[2]) - finally: - exc_info = None - - response = (status, headers) - return buffer.append - - app_rv = app(environ, start_response) - close_func = getattr(app_rv, "close", None) - app_iter: t.Iterable[bytes] = iter(app_rv) - - # when buffering we emit the close call early and convert the - # application iterator into a regular list - if buffered: - try: - app_iter = list(app_iter) - finally: - if close_func is not None: - close_func() - - # otherwise we iterate the application iter until we have a response, chain - # the already received data with the already collected data and wrap it in - # a new `ClosingIterator` if we need to restore a `close` callable from the - # original return value. - else: - for item in app_iter: - buffer.append(item) - - if response is not None: - break - - if buffer: - app_iter = chain(buffer, app_iter) - - if close_func is not None and app_iter is not app_rv: - app_iter = ClosingIterator(app_iter, close_func) - - status, headers = response # type: ignore - return app_iter, status, Headers(headers) - - -class TestResponse(Response): - """:class:`~werkzeug.wrappers.Response` subclass that provides extra - information about requests made with the test :class:`Client`. - - Test client requests will always return an instance of this class. - If a custom response class is passed to the client, it is - subclassed along with this to support test information. - - If the test request included large files, or if the application is - serving a file, call :meth:`close` to close any open files and - prevent Python showing a ``ResourceWarning``. - """ - - request: Request - """A request object with the environ used to make the request that - resulted in this response. - """ - - history: t.Tuple["TestResponse", ...] - """A list of intermediate responses. Populated when the test request - is made with ``follow_redirects`` enabled. 
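# Illustrative sketch (not part of the deleted file): create_environ() and
# run_wsgi_app() from above used directly, without a Client; ``app`` is a
# hypothetical WSGI callable.
from werkzeug.test import create_environ, run_wsgi_app
from werkzeug.wrappers import Response

def app(environ, start_response):
    return Response(b"ok")(environ, start_response)

environ = create_environ("/health", "http://localhost/")
app_iter, status, headers = run_wsgi_app(app, environ, buffered=True)
assert status == "200 OK"
assert b"".join(app_iter) == b"ok"
assert headers["Content-Type"] == "text/plain; charset=utf-8"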
- """ - - def __init__( - self, - response: t.Iterable[bytes], - status: str, - headers: Headers, - request: Request, - history: t.Tuple["TestResponse"] = (), # type: ignore - **kwargs: t.Any, - ) -> None: - super().__init__(response, status, headers, **kwargs) - self.request = request - self.history = history - self._compat_tuple = response, status, headers - - def __iter__(self) -> t.Iterator: - warnings.warn( - ( - "The test client no longer returns a tuple, it returns" - " a 'TestResponse'. Tuple unpacking is deprecated and" - " will be removed in Werkzeug 2.1. Access the" - " attributes 'data', 'status', and 'headers' instead." - ), - DeprecationWarning, - stacklevel=2, - ) - return iter(self._compat_tuple) - - def __getitem__(self, item: int) -> t.Any: - warnings.warn( - ( - "The test client no longer returns a tuple, it returns" - " a 'TestResponse'. Item indexing is deprecated and" - " will be removed in Werkzeug 2.1. Access the" - " attributes 'data', 'status', and 'headers' instead." - ), - DeprecationWarning, - stacklevel=2, - ) - return self._compat_tuple[item] diff --git a/venv/lib/python3.8/site-packages/werkzeug/testapp.py b/venv/lib/python3.8/site-packages/werkzeug/testapp.py deleted file mode 100644 index 981f887..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/testapp.py +++ /dev/null @@ -1,240 +0,0 @@ -"""A small application that can be used to test a WSGI server and check -it for WSGI compliance. -""" -import base64 -import os -import sys -import typing as t -from html import escape -from textwrap import wrap - -from . import __version__ as _werkzeug_version -from .wrappers.request import Request -from .wrappers.response import Response - -if t.TYPE_CHECKING: - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIEnvironment - - -logo = Response( - base64.b64decode( - """ -R0lGODlhoACgAOMIAAEDACwpAEpCAGdgAJaKAM28AOnVAP3rAP///////// -//////////////////////yH5BAEKAAgALAAAAACgAKAAAAT+EMlJq704680R+F0ojmRpnuj0rWnrv -nB8rbRs33gu0bzu/0AObxgsGn3D5HHJbCUFyqZ0ukkSDlAidctNFg7gbI9LZlrBaHGtzAae0eloe25 -7w9EDOX2fst/xenyCIn5/gFqDiVVDV4aGeYiKkhSFjnCQY5OTlZaXgZp8nJ2ekaB0SQOjqphrpnOiq -ncEn65UsLGytLVmQ6m4sQazpbtLqL/HwpnER8bHyLrLOc3Oz8PRONPU1crXN9na263dMt/g4SzjMeX -m5yDpLqgG7OzJ4u8lT/P69ej3JPn69kHzN2OIAHkB9RUYSFCFQYQJFTIkCDBiwoXWGnowaLEjRm7+G -p9A7Hhx4rUkAUaSLJlxHMqVMD/aSycSZkyTplCqtGnRAM5NQ1Ly5OmzZc6gO4d6DGAUKA+hSocWYAo -SlM6oUWX2O/o0KdaVU5vuSQLAa0ADwQgMEMB2AIECZhVSnTno6spgbtXmHcBUrQACcc2FrTrWS8wAf -78cMFBgwIBgbN+qvTt3ayikRBk7BoyGAGABAdYyfdzRQGV3l4coxrqQ84GpUBmrdR3xNIDUPAKDBSA -ADIGDhhqTZIWaDcrVX8EsbNzbkvCOxG8bN5w8ly9H8jyTJHC6DFndQydbguh2e/ctZJFXRxMAqqPVA -tQH5E64SPr1f0zz7sQYjAHg0In+JQ11+N2B0XXBeeYZgBZFx4tqBToiTCPv0YBgQv8JqA6BEf6RhXx -w1ENhRBnWV8ctEX4Ul2zc3aVGcQNC2KElyTDYyYUWvShdjDyMOGMuFjqnII45aogPhz/CodUHFwaDx -lTgsaOjNyhGWJQd+lFoAGk8ObghI0kawg+EV5blH3dr+digkYuAGSaQZFHFz2P/cTaLmhF52QeSb45 -Jwxd+uSVGHlqOZpOeJpCFZ5J+rkAkFjQ0N1tah7JJSZUFNsrkeJUJMIBi8jyaEKIhKPomnC91Uo+NB -yyaJ5umnnpInIFh4t6ZSpGaAVmizqjpByDegYl8tPE0phCYrhcMWSv+uAqHfgH88ak5UXZmlKLVJhd -dj78s1Fxnzo6yUCrV6rrDOkluG+QzCAUTbCwf9SrmMLzK6p+OPHx7DF+bsfMRq7Ec61Av9i6GLw23r -idnZ+/OO0a99pbIrJkproCQMA17OPG6suq3cca5ruDfXCCDoS7BEdvmJn5otdqscn+uogRHHXs8cbh -EIfYaDY1AkrC0cqwcZpnM6ludx72x0p7Fo/hZAcpJDjax0UdHavMKAbiKltMWCF3xxh9k25N/Viud8 -ba78iCvUkt+V6BpwMlErmcgc502x+u1nSxJSJP9Mi52awD1V4yB/QHONsnU3L+A/zR4VL/indx/y64 -gqcj+qgTeweM86f0Qy1QVbvmWH1D9h+alqg254QD8HJXHvjQaGOqEqC22M54PcftZVKVSQG9jhkv7C -JyTyDoAJfPdu8v7DRZAxsP/ky9MJ3OL36DJfCFPASC3/aXlfLOOON9vGZZHydGf8LnxYJuuVIbl83y 
-Az5n/RPz07E+9+zw2A2ahz4HxHo9Kt79HTMx1Q7ma7zAzHgHqYH0SoZWyTuOLMiHwSfZDAQTn0ajk9 -YQqodnUYjByQZhZak9Wu4gYQsMyEpIOAOQKze8CmEF45KuAHTvIDOfHJNipwoHMuGHBnJElUoDmAyX -c2Qm/R8Ah/iILCCJOEokGowdhDYc/yoL+vpRGwyVSCWFYZNljkhEirGXsalWcAgOdeAdoXcktF2udb -qbUhjWyMQxYO01o6KYKOr6iK3fE4MaS+DsvBsGOBaMb0Y6IxADaJhFICaOLmiWTlDAnY1KzDG4ambL -cWBA8mUzjJsN2KjSaSXGqMCVXYpYkj33mcIApyhQf6YqgeNAmNvuC0t4CsDbSshZJkCS1eNisKqlyG -cF8G2JeiDX6tO6Mv0SmjCa3MFb0bJaGPMU0X7c8XcpvMaOQmCajwSeY9G0WqbBmKv34DsMIEztU6Y2 -KiDlFdt6jnCSqx7Dmt6XnqSKaFFHNO5+FmODxMCWBEaco77lNDGXBM0ECYB/+s7nKFdwSF5hgXumQe -EZ7amRg39RHy3zIjyRCykQh8Zo2iviRKyTDn/zx6EefptJj2Cw+Ep2FSc01U5ry4KLPYsTyWnVGnvb -UpyGlhjBUljyjHhWpf8OFaXwhp9O4T1gU9UeyPPa8A2l0p1kNqPXEVRm1AOs1oAGZU596t6SOR2mcB -Oco1srWtkaVrMUzIErrKri85keKqRQYX9VX0/eAUK1hrSu6HMEX3Qh2sCh0q0D2CtnUqS4hj62sE/z -aDs2Sg7MBS6xnQeooc2R2tC9YrKpEi9pLXfYXp20tDCpSP8rKlrD4axprb9u1Df5hSbz9QU0cRpfgn -kiIzwKucd0wsEHlLpe5yHXuc6FrNelOl7pY2+11kTWx7VpRu97dXA3DO1vbkhcb4zyvERYajQgAADs -=""" - ), - mimetype="image/png", -) - - -TEMPLATE = """\ - -WSGI Information - -
  [HTML markup of this TEMPLATE string was lost to extraction; only its text
   survives. The template renders a "WSGI Information" page: an introductory
   paragraph ("This page displays all available information about the WSGI
   server and the underlying Python interpreter."), a "Python Interpreter"
   table with the slots %(python_version)s, %(platform)s [%(os)s],
   %(api_version)s, %(byteorder)s and %(werkzeug_version)s, a "WSGI
   Environment" table filled from %(wsgi_env)s, an "Installed Eggs" list
   filled from %(python_eggs)s, and a "System Path" list filled from
   %(sys_path)s together with notes on gray/underlined entries (invalid
   resources or custom import hooks) and highlighted entries (expanded from
   relative paths).]
    -""" - - -def iter_sys_path() -> t.Iterator[t.Tuple[str, bool, bool]]: - if os.name == "posix": - - def strip(x: str) -> str: - prefix = os.path.expanduser("~") - if x.startswith(prefix): - x = f"~{x[len(prefix) :]}" - return x - - else: - - def strip(x: str) -> str: - return x - - cwd = os.path.abspath(os.getcwd()) - for item in sys.path: - path = os.path.join(cwd, item or os.path.curdir) - yield strip(os.path.normpath(path)), not os.path.isdir(path), path != item - - -def render_testapp(req: Request) -> bytes: - try: - import pkg_resources - except ImportError: - eggs: t.Iterable[t.Any] = () - else: - eggs = sorted( - pkg_resources.working_set, - key=lambda x: x.project_name.lower(), # type: ignore - ) - python_eggs = [] - for egg in eggs: - try: - version = egg.version - except (ValueError, AttributeError): - version = "unknown" - python_eggs.append( - f"
<li>{escape(egg.project_name)} [{escape(version)}]" - ) - - wsgi_env = [] - sorted_environ = sorted(req.environ.items(), key=lambda x: repr(x[0]).lower()) - for key, value in sorted_environ: - value = "".join(wrap(escape(repr(value)))) - wsgi_env.append(f"<tr><th>{escape(str(key))}</th><td><code>{value}</code></td></tr>") - - sys_path = [] - for item, virtual, expanded in iter_sys_path(): - class_ = [] - if virtual: - class_.append("virtual") - if expanded: - class_.append("exp") - class_ = f' class="{" ".join(class_)}"' if class_ else "" - sys_path.append(f"<li{class_}>{escape(item)}") - - return ( - TEMPLATE - % { - "python_version": "<br>
    ".join(escape(sys.version).splitlines()), - "platform": escape(sys.platform), - "os": escape(os.name), - "api_version": sys.api_version, - "byteorder": sys.byteorder, - "werkzeug_version": _werkzeug_version, - "python_eggs": "\n".join(python_eggs), - "wsgi_env": "\n".join(wsgi_env), - "sys_path": "\n".join(sys_path), - } - ).encode("utf-8") - - -def test_app( - environ: "WSGIEnvironment", start_response: "StartResponse" -) -> t.Iterable[bytes]: - """Simple test application that dumps the environment. You can use - it to check if Werkzeug is working properly: - - .. sourcecode:: pycon - - >>> from werkzeug.serving import run_simple - >>> from werkzeug.testapp import test_app - >>> run_simple('localhost', 3000, test_app) - * Running on http://localhost:3000/ - - The application displays important information from the WSGI environment, - the Python interpreter and the installed libraries. - """ - req = Request(environ, populate_request=False) - if req.args.get("resource") == "logo": - response = logo - else: - response = Response(render_testapp(req), mimetype="text/html") - return response(environ, start_response) - - -if __name__ == "__main__": - from .serving import run_simple - - run_simple("localhost", 5000, test_app, use_reloader=True) diff --git a/venv/lib/python3.8/site-packages/werkzeug/urls.py b/venv/lib/python3.8/site-packages/werkzeug/urls.py deleted file mode 100644 index 9529da0..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/urls.py +++ /dev/null @@ -1,1211 +0,0 @@ -"""Functions for working with URLs. - -Contains implementations of functions from :mod:`urllib.parse` that -handle bytes and strings. -""" -import codecs -import os -import re -import typing as t -import warnings - -from ._internal import _check_str_tuple -from ._internal import _decode_idna -from ._internal import _encode_idna -from ._internal import _make_encode_wrapper -from ._internal import _to_str - -if t.TYPE_CHECKING: - from . import datastructures as ds - -# A regular expression for what a valid schema looks like -_scheme_re = re.compile(r"^[a-zA-Z0-9+-.]+$") - -# Characters that are safe in any part of an URL. -_always_safe = frozenset( - bytearray( - b"abcdefghijklmnopqrstuvwxyz" - b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" - b"0123456789" - b"-._~" - ) -) - -_hexdigits = "0123456789ABCDEFabcdef" -_hextobyte = { - f"{a}{b}".encode("ascii"): int(f"{a}{b}", 16) - for a in _hexdigits - for b in _hexdigits -} -_bytetohex = [f"%{char:02X}".encode("ascii") for char in range(256)] - - -class _URLTuple(t.NamedTuple): - scheme: str - netloc: str - path: str - query: str - fragment: str - - -class BaseURL(_URLTuple): - """Superclass of :py:class:`URL` and :py:class:`BytesURL`.""" - - __slots__ = () - _at: str - _colon: str - _lbracket: str - _rbracket: str - - def __str__(self) -> str: - return self.to_url() - - def replace(self, **kwargs: t.Any) -> "BaseURL": - """Return an URL with the same values, except for those parameters - given new values by whichever keyword arguments are specified.""" - return self._replace(**kwargs) - - @property - def host(self) -> t.Optional[str]: - """The host part of the URL if available, otherwise `None`. The - host is either the hostname or the IP address mentioned in the - URL. It will not contain the port. - """ - return self._split_host()[0] - - @property - def ascii_host(self) -> t.Optional[str]: - """Works exactly like :attr:`host` but will return a result that - is restricted to ASCII. If it finds a netloc that is not ASCII - it will attempt to idna decode it. 
This is useful for socket - operations when the URL might include internationalized characters. - """ - rv = self.host - if rv is not None and isinstance(rv, str): - try: - rv = _encode_idna(rv) # type: ignore - except UnicodeError: - rv = rv.encode("ascii", "ignore") # type: ignore - return _to_str(rv, "ascii", "ignore") - - @property - def port(self) -> t.Optional[int]: - """The port in the URL as an integer if it was present, `None` - otherwise. This does not fill in default ports. - """ - try: - rv = int(_to_str(self._split_host()[1])) - if 0 <= rv <= 65535: - return rv - except (ValueError, TypeError): - pass - return None - - @property - def auth(self) -> t.Optional[str]: - """The authentication part in the URL if available, `None` - otherwise. - """ - return self._split_netloc()[0] - - @property - def username(self) -> t.Optional[str]: - """The username if it was part of the URL, `None` otherwise. - This undergoes URL decoding and will always be a string. - """ - rv = self._split_auth()[0] - if rv is not None: - return _url_unquote_legacy(rv) - return None - - @property - def raw_username(self) -> t.Optional[str]: - """The username if it was part of the URL, `None` otherwise. - Unlike :attr:`username` this one is not being decoded. - """ - return self._split_auth()[0] - - @property - def password(self) -> t.Optional[str]: - """The password if it was part of the URL, `None` otherwise. - This undergoes URL decoding and will always be a string. - """ - rv = self._split_auth()[1] - if rv is not None: - return _url_unquote_legacy(rv) - return None - - @property - def raw_password(self) -> t.Optional[str]: - """The password if it was part of the URL, `None` otherwise. - Unlike :attr:`password` this one is not being decoded. - """ - return self._split_auth()[1] - - def decode_query(self, *args: t.Any, **kwargs: t.Any) -> "ds.MultiDict[str, str]": - """Decodes the query part of the URL. Ths is a shortcut for - calling :func:`url_decode` on the query argument. The arguments and - keyword arguments are forwarded to :func:`url_decode` unchanged. - """ - return url_decode(self.query, *args, **kwargs) - - def join(self, *args: t.Any, **kwargs: t.Any) -> "BaseURL": - """Joins this URL with another one. This is just a convenience - function for calling into :meth:`url_join` and then parsing the - return value again. - """ - return url_parse(url_join(self, *args, **kwargs)) - - def to_url(self) -> str: - """Returns a URL string or bytes depending on the type of the - information stored. This is just a convenience function - for calling :meth:`url_unparse` for this URL. 
- """ - return url_unparse(self) - - def encode_netloc(self) -> str: - """Encodes the netloc part to an ASCII safe URL as bytes.""" - rv = self.ascii_host or "" - if ":" in rv: - rv = f"[{rv}]" - port = self.port - if port is not None: - rv = f"{rv}:{port}" - auth = ":".join( - filter( - None, - [ - url_quote(self.raw_username or "", "utf-8", "strict", "/:%"), - url_quote(self.raw_password or "", "utf-8", "strict", "/:%"), - ], - ) - ) - if auth: - rv = f"{auth}@{rv}" - return rv - - def decode_netloc(self) -> str: - """Decodes the netloc part into a string.""" - rv = _decode_idna(self.host or "") - - if ":" in rv: - rv = f"[{rv}]" - port = self.port - if port is not None: - rv = f"{rv}:{port}" - auth = ":".join( - filter( - None, - [ - _url_unquote_legacy(self.raw_username or "", "/:%@"), - _url_unquote_legacy(self.raw_password or "", "/:%@"), - ], - ) - ) - if auth: - rv = f"{auth}@{rv}" - return rv - - def to_uri_tuple(self) -> "BaseURL": - """Returns a :class:`BytesURL` tuple that holds a URI. This will - encode all the information in the URL properly to ASCII using the - rules a web browser would follow. - - It's usually more interesting to directly call :meth:`iri_to_uri` which - will return a string. - """ - return url_parse(iri_to_uri(self)) - - def to_iri_tuple(self) -> "BaseURL": - """Returns a :class:`URL` tuple that holds a IRI. This will try - to decode as much information as possible in the URL without - losing information similar to how a web browser does it for the - URL bar. - - It's usually more interesting to directly call :meth:`uri_to_iri` which - will return a string. - """ - return url_parse(uri_to_iri(self)) - - def get_file_location( - self, pathformat: t.Optional[str] = None - ) -> t.Tuple[t.Optional[str], t.Optional[str]]: - """Returns a tuple with the location of the file in the form - ``(server, location)``. If the netloc is empty in the URL or - points to localhost, it's represented as ``None``. - - The `pathformat` by default is autodetection but needs to be set - when working with URLs of a specific system. The supported values - are ``'windows'`` when working with Windows or DOS paths and - ``'posix'`` when working with posix paths. - - If the URL does not point to a local file, the server and location - are both represented as ``None``. - - :param pathformat: The expected format of the path component. - Currently ``'windows'`` and ``'posix'`` are - supported. Defaults to ``None`` which is - autodetect. - """ - if self.scheme != "file": - return None, None - - path = url_unquote(self.path) - host = self.netloc or None - - if pathformat is None: - if os.name == "nt": - pathformat = "windows" - else: - pathformat = "posix" - - if pathformat == "windows": - if path[:1] == "/" and path[1:2].isalpha() and path[2:3] in "|:": - path = f"{path[1:2]}:{path[3:]}" - windows_share = path[:3] in ("\\" * 3, "/" * 3) - import ntpath - - path = ntpath.normpath(path) - # Windows shared drives are represented as ``\\host\\directory``. - # That results in a URL like ``file://///host/directory``, and a - # path like ``///host/directory``. We need to special-case this - # because the path contains the hostname. 
- if windows_share and host is None: - parts = path.lstrip("\\").split("\\", 1) - if len(parts) == 2: - host, path = parts - else: - host = parts[0] - path = "" - elif pathformat == "posix": - import posixpath - - path = posixpath.normpath(path) - else: - raise TypeError(f"Invalid path format {pathformat!r}") - - if host in ("127.0.0.1", "::1", "localhost"): - host = None - - return host, path - - def _split_netloc(self) -> t.Tuple[t.Optional[str], str]: - if self._at in self.netloc: - auth, _, netloc = self.netloc.partition(self._at) - return auth, netloc - return None, self.netloc - - def _split_auth(self) -> t.Tuple[t.Optional[str], t.Optional[str]]: - auth = self._split_netloc()[0] - if not auth: - return None, None - if self._colon not in auth: - return auth, None - - username, _, password = auth.partition(self._colon) - return username, password - - def _split_host(self) -> t.Tuple[t.Optional[str], t.Optional[str]]: - rv = self._split_netloc()[1] - if not rv: - return None, None - - if not rv.startswith(self._lbracket): - if self._colon in rv: - host, _, port = rv.partition(self._colon) - return host, port - return rv, None - - idx = rv.find(self._rbracket) - if idx < 0: - return rv, None - - host = rv[1:idx] - rest = rv[idx + 1 :] - if rest.startswith(self._colon): - return host, rest[1:] - return host, None - - -class URL(BaseURL): - """Represents a parsed URL. This behaves like a regular tuple but - also has some extra attributes that give further insight into the - URL. - """ - - __slots__ = () - _at = "@" - _colon = ":" - _lbracket = "[" - _rbracket = "]" - - def encode(self, charset: str = "utf-8", errors: str = "replace") -> "BytesURL": - """Encodes the URL to a tuple made out of bytes. The charset is - only being used for the path, query and fragment. - """ - return BytesURL( - self.scheme.encode("ascii"), # type: ignore - self.encode_netloc(), - self.path.encode(charset, errors), # type: ignore - self.query.encode(charset, errors), # type: ignore - self.fragment.encode(charset, errors), # type: ignore - ) - - -class BytesURL(BaseURL): - """Represents a parsed URL in bytes.""" - - __slots__ = () - _at = b"@" # type: ignore - _colon = b":" # type: ignore - _lbracket = b"[" # type: ignore - _rbracket = b"]" # type: ignore - - def __str__(self) -> str: - return self.to_url().decode("utf-8", "replace") # type: ignore - - def encode_netloc(self) -> bytes: # type: ignore - """Returns the netloc unchanged as bytes.""" - return self.netloc # type: ignore - - def decode(self, charset: str = "utf-8", errors: str = "replace") -> "URL": - """Decodes the URL to a tuple made out of strings. The charset is - only being used for the path, query and fragment. 
- """ - return URL( - self.scheme.decode("ascii"), # type: ignore - self.decode_netloc(), - self.path.decode(charset, errors), # type: ignore - self.query.decode(charset, errors), # type: ignore - self.fragment.decode(charset, errors), # type: ignore - ) - - -_unquote_maps: t.Dict[t.FrozenSet[int], t.Dict[bytes, int]] = {frozenset(): _hextobyte} - - -def _unquote_to_bytes( - string: t.Union[str, bytes], unsafe: t.Union[str, bytes] = "" -) -> bytes: - if isinstance(string, str): - string = string.encode("utf-8") - - if isinstance(unsafe, str): - unsafe = unsafe.encode("utf-8") - - unsafe = frozenset(bytearray(unsafe)) - groups = iter(string.split(b"%")) - result = bytearray(next(groups, b"")) - - try: - hex_to_byte = _unquote_maps[unsafe] - except KeyError: - hex_to_byte = _unquote_maps[unsafe] = { - h: b for h, b in _hextobyte.items() if b not in unsafe - } - - for group in groups: - code = group[:2] - - if code in hex_to_byte: - result.append(hex_to_byte[code]) - result.extend(group[2:]) - else: - result.append(37) # % - result.extend(group) - - return bytes(result) - - -def _url_encode_impl( - obj: t.Union[t.Mapping[str, str], t.Iterable[t.Tuple[str, str]]], - charset: str, - sort: bool, - key: t.Optional[t.Callable[[t.Tuple[str, str]], t.Any]], -) -> t.Iterator[str]: - from .datastructures import iter_multi_items - - iterable: t.Iterable[t.Tuple[str, str]] = iter_multi_items(obj) - - if sort: - iterable = sorted(iterable, key=key) - - for key_str, value_str in iterable: - if value_str is None: - continue - - if not isinstance(key_str, bytes): - key_bytes = str(key_str).encode(charset) - else: - key_bytes = key_str - - if not isinstance(value_str, bytes): - value_bytes = str(value_str).encode(charset) - else: - value_bytes = value_str - - yield f"{_fast_url_quote_plus(key_bytes)}={_fast_url_quote_plus(value_bytes)}" - - -def _url_unquote_legacy(value: str, unsafe: str = "") -> str: - try: - return url_unquote(value, charset="utf-8", errors="strict", unsafe=unsafe) - except UnicodeError: - return url_unquote(value, charset="latin1", unsafe=unsafe) - - -def url_parse( - url: str, scheme: t.Optional[str] = None, allow_fragments: bool = True -) -> BaseURL: - """Parses a URL from a string into a :class:`URL` tuple. If the URL - is lacking a scheme it can be provided as second argument. Otherwise, - it is ignored. Optionally fragments can be stripped from the URL - by setting `allow_fragments` to `False`. - - The inverse of this function is :func:`url_unparse`. - - :param url: the URL to parse. - :param scheme: the default schema to use if the URL is schemaless. - :param allow_fragments: if set to `False` a fragment will be removed - from the URL. 
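# Illustrative sketch (not part of the deleted file): url_parse() above and
# the URL tuple helpers defined earlier in this module.
from werkzeug.urls import url_parse, url_unparse

url = url_parse("https://bob:hunter2@example.test:8443/docs?page=2#intro")
assert (url.scheme, url.host, url.port) == ("https", "example.test", 8443)
assert url.username == "bob" and url.password == "hunter2"
assert url.decode_query()["page"] == "2"
assert url.replace(fragment="").to_url() == "https://bob:hunter2@example.test:8443/docs?page=2"
assert url_unparse(("https", "example.test", "/x", "", "")) == "https://example.test/x"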
- """ - s = _make_encode_wrapper(url) - is_text_based = isinstance(url, str) - - if scheme is None: - scheme = s("") - netloc = query = fragment = s("") - i = url.find(s(":")) - if i > 0 and _scheme_re.match(_to_str(url[:i], errors="replace")): - # make sure "iri" is not actually a port number (in which case - # "scheme" is really part of the path) - rest = url[i + 1 :] - if not rest or any(c not in s("0123456789") for c in rest): - # not a port number - scheme, url = url[:i].lower(), rest - - if url[:2] == s("//"): - delim = len(url) - for c in s("/?#"): - wdelim = url.find(c, 2) - if wdelim >= 0: - delim = min(delim, wdelim) - netloc, url = url[2:delim], url[delim:] - if (s("[") in netloc and s("]") not in netloc) or ( - s("]") in netloc and s("[") not in netloc - ): - raise ValueError("Invalid IPv6 URL") - - if allow_fragments and s("#") in url: - url, fragment = url.split(s("#"), 1) - if s("?") in url: - url, query = url.split(s("?"), 1) - - result_type = URL if is_text_based else BytesURL - return result_type(scheme, netloc, url, query, fragment) - - -def _make_fast_url_quote( - charset: str = "utf-8", - errors: str = "strict", - safe: t.Union[str, bytes] = "/:", - unsafe: t.Union[str, bytes] = "", -) -> t.Callable[[bytes], str]: - """Precompile the translation table for a URL encoding function. - - Unlike :func:`url_quote`, the generated function only takes the - string to quote. - - :param charset: The charset to encode the result with. - :param errors: How to handle encoding errors. - :param safe: An optional sequence of safe characters to never encode. - :param unsafe: An optional sequence of unsafe characters to always encode. - """ - if isinstance(safe, str): - safe = safe.encode(charset, errors) - - if isinstance(unsafe, str): - unsafe = unsafe.encode(charset, errors) - - safe = (frozenset(bytearray(safe)) | _always_safe) - frozenset(bytearray(unsafe)) - table = [chr(c) if c in safe else f"%{c:02X}" for c in range(256)] - - def quote(string: bytes) -> str: - return "".join([table[c] for c in string]) - - return quote - - -_fast_url_quote = _make_fast_url_quote() -_fast_quote_plus = _make_fast_url_quote(safe=" ", unsafe="+") - - -def _fast_url_quote_plus(string: bytes) -> str: - return _fast_quote_plus(string).replace(" ", "+") - - -def url_quote( - string: t.Union[str, bytes], - charset: str = "utf-8", - errors: str = "strict", - safe: t.Union[str, bytes] = "/:", - unsafe: t.Union[str, bytes] = "", -) -> str: - """URL encode a single string with a given encoding. - - :param s: the string to quote. - :param charset: the charset to be used. - :param safe: an optional sequence of safe characters. - :param unsafe: an optional sequence of unsafe characters. - - .. versionadded:: 0.9.2 - The `unsafe` parameter was added. - """ - if not isinstance(string, (str, bytes, bytearray)): - string = str(string) - if isinstance(string, str): - string = string.encode(charset, errors) - if isinstance(safe, str): - safe = safe.encode(charset, errors) - if isinstance(unsafe, str): - unsafe = unsafe.encode(charset, errors) - safe = (frozenset(bytearray(safe)) | _always_safe) - frozenset(bytearray(unsafe)) - rv = bytearray() - for char in bytearray(string): - if char in safe: - rv.append(char) - else: - rv.extend(_bytetohex[char]) - return bytes(rv).decode(charset) - - -def url_quote_plus( - string: str, charset: str = "utf-8", errors: str = "strict", safe: str = "" -) -> str: - """URL encode a single string with the given encoding and convert - whitespace to "+". - - :param s: The string to quote. 
- :param charset: The charset to be used. - :param safe: An optional sequence of safe characters. - """ - return url_quote(string, charset, errors, safe + " ", "+").replace(" ", "+") - - -def url_unparse(components: t.Tuple[str, str, str, str, str]) -> str: - """The reverse operation to :meth:`url_parse`. This accepts arbitrary - as well as :class:`URL` tuples and returns a URL as a string. - - :param components: the parsed URL as tuple which should be converted - into a URL string. - """ - _check_str_tuple(components) - scheme, netloc, path, query, fragment = components - s = _make_encode_wrapper(scheme) - url = s("") - - # We generally treat file:///x and file:/x the same which is also - # what browsers seem to do. This also allows us to ignore a schema - # register for netloc utilization or having to differentiate between - # empty and missing netloc. - if netloc or (scheme and path.startswith(s("/"))): - if path and path[:1] != s("/"): - path = s("/") + path - url = s("//") + (netloc or s("")) + path - elif path: - url += path - if scheme: - url = scheme + s(":") + url - if query: - url = url + s("?") + query - if fragment: - url = url + s("#") + fragment - return url - - -def url_unquote( - s: t.Union[str, bytes], - charset: str = "utf-8", - errors: str = "replace", - unsafe: str = "", -) -> str: - """URL decode a single string with a given encoding. If the charset - is set to `None` no decoding is performed and raw bytes are - returned. - - :param s: the string to unquote. - :param charset: the charset of the query string. If set to `None` - no decoding will take place. - :param errors: the error handling for the charset decoding. - """ - rv = _unquote_to_bytes(s, unsafe) - if charset is None: - return rv - return rv.decode(charset, errors) - - -def url_unquote_plus( - s: t.Union[str, bytes], charset: str = "utf-8", errors: str = "replace" -) -> str: - """URL decode a single string with the given `charset` and decode "+" to - whitespace. - - Per default encoding errors are ignored. If you want a different behavior - you can set `errors` to ``'replace'`` or ``'strict'``. - - :param s: The string to unquote. - :param charset: the charset of the query string. If set to `None` - no decoding will take place. - :param errors: The error handling for the `charset` decoding. - """ - if isinstance(s, str): - s = s.replace("+", " ") - else: - s = s.replace(b"+", b" ") - return url_unquote(s, charset, errors) - - -def url_fix(s: str, charset: str = "utf-8") -> str: - r"""Sometimes you get an URL by a user that just isn't a real URL because - it contains unsafe characters like ' ' and so on. This function can fix - some of the problems in a similar way browsers handle data entered by the - user: - - >>> url_fix('http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)') - 'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)' - - :param s: the string with the URL to fix. - :param charset: The target charset for the URL if the url was given - as a string. - """ - # First step is to switch to text processing and to convert - # backslashes (which are invalid in URLs anyways) to slashes. This is - # consistent with what Chrome does. 
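# Illustrative sketch (not part of the deleted file): the quoting helpers
# defined above round-trip as follows.
from werkzeug.urls import url_quote, url_quote_plus, url_unquote, url_unquote_plus

assert url_quote("a path/with spaces") == "a%20path/with%20spaces"
assert url_quote_plus("key=va lue&") == "key%3Dva+lue%26"
assert url_unquote("a%20path/with%20spaces") == "a path/with spaces"
assert url_unquote_plus("key%3Dva+lue%26") == "key=va lue&"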
- s = _to_str(s, charset, "replace").replace("\\", "/") - - # For the specific case that we look like a malformed windows URL - # we want to fix this up manually: - if s.startswith("file://") and s[7:8].isalpha() and s[8:10] in (":/", "|/"): - s = f"file:///{s[7:]}" - - url = url_parse(s) - path = url_quote(url.path, charset, safe="/%+$!*'(),") - qs = url_quote_plus(url.query, charset, safe=":&%=+$!*'(),") - anchor = url_quote_plus(url.fragment, charset, safe=":&%=+$!*'(),") - return url_unparse((url.scheme, url.encode_netloc(), path, qs, anchor)) - - -# not-unreserved characters remain quoted when unquoting to IRI -_to_iri_unsafe = "".join([chr(c) for c in range(128) if c not in _always_safe]) - - -def _codec_error_url_quote(e: UnicodeError) -> t.Tuple[str, int]: - """Used in :func:`uri_to_iri` after unquoting to re-quote any - invalid bytes. - """ - # the docs state that UnicodeError does have these attributes, - # but mypy isn't picking them up - out = _fast_url_quote(e.object[e.start : e.end]) # type: ignore - return out, e.end # type: ignore - - -codecs.register_error("werkzeug.url_quote", _codec_error_url_quote) - - -def uri_to_iri( - uri: t.Union[str, t.Tuple[str, str, str, str, str]], - charset: str = "utf-8", - errors: str = "werkzeug.url_quote", -) -> str: - """Convert a URI to an IRI. All valid UTF-8 characters are unquoted, - leaving all reserved and invalid characters quoted. If the URL has - a domain, it is decoded from Punycode. - - >>> uri_to_iri("http://xn--n3h.net/p%C3%A5th?q=%C3%A8ry%DF") - 'http://\\u2603.net/p\\xe5th?q=\\xe8ry%DF' - - :param uri: The URI to convert. - :param charset: The encoding to encode unquoted bytes with. - :param errors: Error handler to use during ``bytes.encode``. By - default, invalid bytes are left quoted. - - .. versionchanged:: 0.15 - All reserved and invalid characters remain quoted. Previously, - only some reserved characters were preserved, and invalid bytes - were replaced instead of left quoted. - - .. versionadded:: 0.6 - """ - if isinstance(uri, tuple): - uri = url_unparse(uri) - - uri = url_parse(_to_str(uri, charset)) - path = url_unquote(uri.path, charset, errors, _to_iri_unsafe) - query = url_unquote(uri.query, charset, errors, _to_iri_unsafe) - fragment = url_unquote(uri.fragment, charset, errors, _to_iri_unsafe) - return url_unparse((uri.scheme, uri.decode_netloc(), path, query, fragment)) - - -# reserved characters remain unquoted when quoting to URI -_to_uri_safe = ":/?#[]@!$&'()*+,;=%" - - -def iri_to_uri( - iri: t.Union[str, t.Tuple[str, str, str, str, str]], - charset: str = "utf-8", - errors: str = "strict", - safe_conversion: bool = False, -) -> str: - """Convert an IRI to a URI. All non-ASCII and unsafe characters are - quoted. If the URL has a domain, it is encoded to Punycode. - - >>> iri_to_uri('http://\\u2603.net/p\\xe5th?q=\\xe8ry%DF') - 'http://xn--n3h.net/p%C3%A5th?q=%C3%A8ry%DF' - - :param iri: The IRI to convert. - :param charset: The encoding of the IRI. - :param errors: Error handler to use during ``bytes.encode``. - :param safe_conversion: Return the URL unchanged if it only contains - ASCII characters and no whitespace. See the explanation below. - - There is a general problem with IRI conversion with some protocols - that are in violation of the URI specification. 
Consider the - following two IRIs:: - - magnet:?xt=uri:whatever - itms-services://?action=download-manifest - - After parsing, we don't know if the scheme requires the ``//``, - which is dropped if empty, but conveys different meanings in the - final URL if it's present or not. In this case, you can use - ``safe_conversion``, which will return the URL unchanged if it only - contains ASCII characters and no whitespace. This can result in a - URI with unquoted characters if it was not already quoted correctly, - but preserves the URL's semantics. Werkzeug uses this for the - ``Location`` header for redirects. - - .. versionchanged:: 0.15 - All reserved characters remain unquoted. Previously, only some - reserved characters were left unquoted. - - .. versionchanged:: 0.9.6 - The ``safe_conversion`` parameter was added. - - .. versionadded:: 0.6 - """ - if isinstance(iri, tuple): - iri = url_unparse(iri) - - if safe_conversion: - # If we're not sure if it's safe to convert the URL, and it only - # contains ASCII characters, return it unconverted. - try: - native_iri = _to_str(iri) - ascii_iri = native_iri.encode("ascii") - - # Only return if it doesn't have whitespace. (Why?) - if len(ascii_iri.split()) == 1: - return native_iri - except UnicodeError: - pass - - iri = url_parse(_to_str(iri, charset, errors)) - path = url_quote(iri.path, charset, errors, _to_uri_safe) - query = url_quote(iri.query, charset, errors, _to_uri_safe) - fragment = url_quote(iri.fragment, charset, errors, _to_uri_safe) - return url_unparse((iri.scheme, iri.encode_netloc(), path, query, fragment)) - - -def url_decode( - s: t.AnyStr, - charset: str = "utf-8", - decode_keys: None = None, - include_empty: bool = True, - errors: str = "replace", - separator: str = "&", - cls: t.Optional[t.Type["ds.MultiDict"]] = None, -) -> "ds.MultiDict[str, str]": - """Parse a query string and return it as a :class:`MultiDict`. - - :param s: The query string to parse. - :param charset: Decode bytes to string with this charset. If not - given, bytes are returned as-is. - :param include_empty: Include keys with empty values in the dict. - :param errors: Error handling behavior when decoding bytes. - :param separator: Separator character between pairs. - :param cls: Container to hold result instead of :class:`MultiDict`. - - .. versionchanged:: 2.0 - The ``decode_keys`` parameter is deprecated and will be removed - in Werkzeug 2.1. - - .. versionchanged:: 0.5 - In previous versions ";" and "&" could be used for url decoding. - Now only "&" is supported. If you want to use ";", a different - ``separator`` can be provided. - - .. versionchanged:: 0.5 - The ``cls`` parameter was added. 
- """ - if decode_keys is not None: - warnings.warn( - "'decode_keys' is deprecated and will be removed in Werkzeug 2.1.", - DeprecationWarning, - stacklevel=2, - ) - if cls is None: - from .datastructures import MultiDict # noqa: F811 - - cls = MultiDict - if isinstance(s, str) and not isinstance(separator, str): - separator = separator.decode(charset or "ascii") - elif isinstance(s, bytes) and not isinstance(separator, bytes): - separator = separator.encode(charset or "ascii") # type: ignore - return cls( - _url_decode_impl( - s.split(separator), charset, include_empty, errors # type: ignore - ) - ) - - -def url_decode_stream( - stream: t.IO[bytes], - charset: str = "utf-8", - decode_keys: None = None, - include_empty: bool = True, - errors: str = "replace", - separator: bytes = b"&", - cls: t.Optional[t.Type["ds.MultiDict"]] = None, - limit: t.Optional[int] = None, - return_iterator: bool = False, -) -> "ds.MultiDict[str, str]": - """Works like :func:`url_decode` but decodes a stream. The behavior - of stream and limit follows functions like - :func:`~werkzeug.wsgi.make_line_iter`. The generator of pairs is - directly fed to the `cls` so you can consume the data while it's - parsed. - - :param stream: a stream with the encoded querystring - :param charset: the charset of the query string. If set to `None` - no decoding will take place. - :param include_empty: Set to `False` if you don't want empty values to - appear in the dict. - :param errors: the decoding error behavior. - :param separator: the pair separator to be used, defaults to ``&`` - :param cls: an optional dict class to use. If this is not specified - or `None` the default :class:`MultiDict` is used. - :param limit: the content length of the URL data. Not necessary if - a limited stream is provided. - - .. versionchanged:: 2.0 - The ``decode_keys`` and ``return_iterator`` parameters are - deprecated and will be removed in Werkzeug 2.1. - - .. versionadded:: 0.8 - """ - from .wsgi import make_chunk_iter - - if decode_keys is not None: - warnings.warn( - "'decode_keys' is deprecated and will be removed in Werkzeug 2.1.", - DeprecationWarning, - stacklevel=2, - ) - - pair_iter = make_chunk_iter(stream, separator, limit) - decoder = _url_decode_impl(pair_iter, charset, include_empty, errors) - - if return_iterator: - warnings.warn( - "'return_iterator' is deprecated and will be removed in Werkzeug 2.1.", - DeprecationWarning, - stacklevel=2, - ) - return decoder # type: ignore - - if cls is None: - from .datastructures import MultiDict # noqa: F811 - - cls = MultiDict - - return cls(decoder) - - -def _url_decode_impl( - pair_iter: t.Iterable[t.AnyStr], charset: str, include_empty: bool, errors: str -) -> t.Iterator[t.Tuple[str, str]]: - for pair in pair_iter: - if not pair: - continue - s = _make_encode_wrapper(pair) - equal = s("=") - if equal in pair: - key, value = pair.split(equal, 1) - else: - if not include_empty: - continue - key = pair - value = s("") - yield ( - url_unquote_plus(key, charset, errors), - url_unquote_plus(value, charset, errors), - ) - - -def url_encode( - obj: t.Union[t.Mapping[str, str], t.Iterable[t.Tuple[str, str]]], - charset: str = "utf-8", - encode_keys: None = None, - sort: bool = False, - key: t.Optional[t.Callable[[t.Tuple[str, str]], t.Any]] = None, - separator: str = "&", -) -> str: - """URL encode a dict/`MultiDict`. If a value is `None` it will not appear - in the result string. Per default only values are encoded into the target - charset strings. 
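# Illustrative sketch (not part of the deleted file): the url_decode()/
# url_encode() pair here round-trips a query string through a MultiDict.
from werkzeug.urls import url_decode, url_encode

args = url_decode("a=1&a=2&b=sp%C3%A4m")
assert args.getlist("a") == ["1", "2"] and args["b"] == "späm"
assert url_encode({"b": "späm", "a": 1}, sort=True) == "a=1&b=sp%C3%A4m"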
- - :param obj: the object to encode into a query string. - :param charset: the charset of the query string. - :param sort: set to `True` if you want parameters to be sorted by `key`. - :param separator: the separator to be used for the pairs. - :param key: an optional function to be used for sorting. For more details - check out the :func:`sorted` documentation. - - .. versionchanged:: 2.0 - The ``encode_keys`` parameter is deprecated and will be removed - in Werkzeug 2.1. - - .. versionchanged:: 0.5 - Added the ``sort``, ``key``, and ``separator`` parameters. - """ - if encode_keys is not None: - warnings.warn( - "'encode_keys' is deprecated and will be removed in Werkzeug 2.1.", - DeprecationWarning, - stacklevel=2, - ) - separator = _to_str(separator, "ascii") - return separator.join(_url_encode_impl(obj, charset, sort, key)) - - -def url_encode_stream( - obj: t.Union[t.Mapping[str, str], t.Iterable[t.Tuple[str, str]]], - stream: t.Optional[t.IO[str]] = None, - charset: str = "utf-8", - encode_keys: None = None, - sort: bool = False, - key: t.Optional[t.Callable[[t.Tuple[str, str]], t.Any]] = None, - separator: str = "&", -) -> None: - """Like :meth:`url_encode` but writes the results to a stream - object. If the stream is `None` a generator over all encoded - pairs is returned. - - :param obj: the object to encode into a query string. - :param stream: a stream to write the encoded object into or `None` if - an iterator over the encoded pairs should be returned. In - that case the separator argument is ignored. - :param charset: the charset of the query string. - :param sort: set to `True` if you want parameters to be sorted by `key`. - :param separator: the separator to be used for the pairs. - :param key: an optional function to be used for sorting. For more details - check out the :func:`sorted` documentation. - - .. versionchanged:: 2.0 - The ``encode_keys`` parameter is deprecated and will be removed - in Werkzeug 2.1. - - .. versionadded:: 0.8 - """ - if encode_keys is not None: - warnings.warn( - "'encode_keys' is deprecated and will be removed in Werkzeug 2.1.", - DeprecationWarning, - stacklevel=2, - ) - separator = _to_str(separator, "ascii") - gen = _url_encode_impl(obj, charset, sort, key) - if stream is None: - return gen # type: ignore - for idx, chunk in enumerate(gen): - if idx: - stream.write(separator) - stream.write(chunk) - return None - - -def url_join( - base: t.Union[str, t.Tuple[str, str, str, str, str]], - url: t.Union[str, t.Tuple[str, str, str, str, str]], - allow_fragments: bool = True, -) -> str: - """Join a base URL and a possibly relative URL to form an absolute - interpretation of the latter. - - :param base: the base URL for the join operation. - :param url: the URL to join. - :param allow_fragments: indicates whether fragments should be allowed. 
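# Illustrative sketch (not part of the deleted file): relative reference
# resolution with url_join() above, comparable to urllib.parse.urljoin.
from werkzeug.urls import url_join

assert url_join("http://example.test/docs/intro", "setup") == "http://example.test/docs/setup"
assert url_join("http://example.test/docs/", "../img/logo.png") == "http://example.test/img/logo.png"
assert url_join("http://example.test/docs/", "//cdn.test/x.js") == "http://cdn.test/x.js"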
- """ - if isinstance(base, tuple): - base = url_unparse(base) - if isinstance(url, tuple): - url = url_unparse(url) - - _check_str_tuple((base, url)) - s = _make_encode_wrapper(base) - - if not base: - return url - if not url: - return base - - bscheme, bnetloc, bpath, bquery, bfragment = url_parse( - base, allow_fragments=allow_fragments - ) - scheme, netloc, path, query, fragment = url_parse(url, bscheme, allow_fragments) - if scheme != bscheme: - return url - if netloc: - return url_unparse((scheme, netloc, path, query, fragment)) - netloc = bnetloc - - if path[:1] == s("/"): - segments = path.split(s("/")) - elif not path: - segments = bpath.split(s("/")) - if not query: - query = bquery - else: - segments = bpath.split(s("/"))[:-1] + path.split(s("/")) - - # If the rightmost part is "./" we want to keep the slash but - # remove the dot. - if segments[-1] == s("."): - segments[-1] = s("") - - # Resolve ".." and "." - segments = [segment for segment in segments if segment != s(".")] - while True: - i = 1 - n = len(segments) - 1 - while i < n: - if segments[i] == s("..") and segments[i - 1] not in (s(""), s("..")): - del segments[i - 1 : i + 1] - break - i += 1 - else: - break - - # Remove trailing ".." if the URL is absolute - unwanted_marker = [s(""), s("..")] - while segments[:2] == unwanted_marker: - del segments[1] - - path = s("/").join(segments) - return url_unparse((scheme, netloc, path, query, fragment)) - - -class Href: - """Implements a callable that constructs URLs with the given base. The - function can be called with any number of positional and keyword - arguments which than are used to assemble the URL. Works with URLs - and posix paths. - - Positional arguments are appended as individual segments to - the path of the URL: - - >>> href = Href('/foo') - >>> href('bar', 23) - '/foo/bar/23' - >>> href('foo', bar=23) - '/foo/foo?bar=23' - - If any of the arguments (positional or keyword) evaluates to `None` it - will be skipped. If no keyword arguments are given the last argument - can be a :class:`dict` or :class:`MultiDict` (or any other dict subclass), - otherwise the keyword arguments are used for the query parameters, cutting - off the first trailing underscore of the parameter name: - - >>> href(is_=42) - '/foo?is=42' - >>> href({'foo': 'bar'}) - '/foo?foo=bar' - - Combining of both methods is not allowed: - - >>> href({'foo': 'bar'}, bar=42) - Traceback (most recent call last): - ... - TypeError: keyword arguments and query-dicts can't be combined - - Accessing attributes on the href object creates a new href object with - the attribute name as prefix: - - >>> bar_href = href.bar - >>> bar_href("blub") - '/foo/bar/blub' - - If `sort` is set to `True` the items are sorted by `key` or the default - sorting algorithm: - - >>> href = Href("/", sort=True) - >>> href(a=1, b=2, c=3) - '/?a=1&b=2&c=3' - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Use :mod:`werkzeug.routing` - instead. - - .. versionadded:: 0.5 - `sort` and `key` were added. - """ - - def __init__( # type: ignore - self, base="./", charset="utf-8", sort=False, key=None - ): - warnings.warn( - "'Href' is deprecated and will be removed in Werkzeug 2.1." 
- " Use 'werkzeug.routing' instead.", - DeprecationWarning, - stacklevel=2, - ) - - if not base: - base = "./" - self.base = base - self.charset = charset - self.sort = sort - self.key = key - - def __getattr__(self, name): # type: ignore - if name[:2] == "__": - raise AttributeError(name) - base = self.base - if base[-1:] != "/": - base += "/" - return Href(url_join(base, name), self.charset, self.sort, self.key) - - def __call__(self, *path, **query): # type: ignore - if path and isinstance(path[-1], dict): - if query: - raise TypeError("keyword arguments and query-dicts can't be combined") - query, path = path[-1], path[:-1] - elif query: - query = {k[:-1] if k.endswith("_") else k: v for k, v in query.items()} - path = "/".join( - [ - _to_str(url_quote(x, self.charset), "ascii") - for x in path - if x is not None - ] - ).lstrip("/") - rv = self.base - if path: - if not rv.endswith("/"): - rv += "/" - rv = url_join(rv, f"./{path}") - if query: - rv += "?" + _to_str( - url_encode(query, self.charset, sort=self.sort, key=self.key), "ascii" - ) - return rv diff --git a/venv/lib/python3.8/site-packages/werkzeug/user_agent.py b/venv/lib/python3.8/site-packages/werkzeug/user_agent.py deleted file mode 100644 index 66ffcbe..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/user_agent.py +++ /dev/null @@ -1,47 +0,0 @@ -import typing as t - - -class UserAgent: - """Represents a parsed user agent header value. - - The default implementation does no parsing, only the :attr:`string` - attribute is set. A subclass may parse the string to set the - common attributes or expose other information. Set - :attr:`werkzeug.wrappers.Request.user_agent_class` to use a - subclass. - - :param string: The header value to parse. - - .. versionadded:: 2.0 - This replaces the previous ``useragents`` module, but does not - provide a built-in parser. 
- """ - - platform: t.Optional[str] = None - """The OS name, if it could be parsed from the string.""" - - browser: t.Optional[str] = None - """The browser name, if it could be parsed from the string.""" - - version: t.Optional[str] = None - """The browser version, if it could be parsed from the string.""" - - language: t.Optional[str] = None - """The browser language, if it could be parsed from the string.""" - - def __init__(self, string: str) -> None: - self.string: str = string - """The original header value.""" - - def __repr__(self) -> str: - return f"<{type(self).__name__} {self.browser}/{self.version}>" - - def __str__(self) -> str: - return self.string - - def __bool__(self) -> bool: - return bool(self.browser) - - def to_header(self) -> str: - """Convert to a header value.""" - return self.string diff --git a/venv/lib/python3.8/site-packages/werkzeug/useragents.py b/venv/lib/python3.8/site-packages/werkzeug/useragents.py deleted file mode 100644 index 4deed8f..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/useragents.py +++ /dev/null @@ -1,215 +0,0 @@ -import re -import typing as t -import warnings - -from .user_agent import UserAgent as _BaseUserAgent - -if t.TYPE_CHECKING: - from _typeshed.wsgi import WSGIEnvironment - - -class _UserAgentParser: - platform_rules: t.ClassVar[t.Iterable[t.Tuple[str, str]]] = ( - (" cros ", "chromeos"), - ("iphone|ios", "iphone"), - ("ipad", "ipad"), - (r"darwin\b|mac\b|os\s*x", "macos"), - ("win", "windows"), - (r"android", "android"), - ("netbsd", "netbsd"), - ("openbsd", "openbsd"), - ("freebsd", "freebsd"), - ("dragonfly", "dragonflybsd"), - ("(sun|i86)os", "solaris"), - (r"x11\b|lin(\b|ux)?", "linux"), - (r"nintendo\s+wii", "wii"), - ("irix", "irix"), - ("hp-?ux", "hpux"), - ("aix", "aix"), - ("sco|unix_sv", "sco"), - ("bsd", "bsd"), - ("amiga", "amiga"), - ("blackberry|playbook", "blackberry"), - ("symbian", "symbian"), - ) - browser_rules: t.ClassVar[t.Iterable[t.Tuple[str, str]]] = ( - ("googlebot", "google"), - ("msnbot", "msn"), - ("yahoo", "yahoo"), - ("ask jeeves", "ask"), - (r"aol|america\s+online\s+browser", "aol"), - (r"opera|opr", "opera"), - ("edge|edg", "edge"), - ("chrome|crios", "chrome"), - ("seamonkey", "seamonkey"), - ("firefox|firebird|phoenix|iceweasel", "firefox"), - ("galeon", "galeon"), - ("safari|version", "safari"), - ("webkit", "webkit"), - ("camino", "camino"), - ("konqueror", "konqueror"), - ("k-meleon", "kmeleon"), - ("netscape", "netscape"), - (r"msie|microsoft\s+internet\s+explorer|trident/.+? rv:", "msie"), - ("lynx", "lynx"), - ("links", "links"), - ("Baiduspider", "baidu"), - ("bingbot", "bing"), - ("mozilla", "mozilla"), - ) - - _browser_version_re = r"(?:{pattern})[/\sa-z(]*(\d+[.\da-z]+)?" 
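As the user_agent.py docstring above recommends, a custom parser is plugged in by subclassing UserAgent and assigning it to Request.user_agent_class. A hedged sketch, where SimpleUserAgent and its naive heuristic are illustrative only and not part of this patch:

from werkzeug.user_agent import UserAgent
from werkzeug.wrappers import Request

class SimpleUserAgent(UserAgent):
    def __init__(self, string: str) -> None:
        super().__init__(string)
        # deliberately naive "parsing"; a real subclass would delegate to a dedicated library
        self.browser = "firefox" if "Firefox/" in string else None

# Requests now expose request.user_agent.browser via the subclass.
Request.user_agent_class = SimpleUserAgent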
- _language_re = re.compile( - r"(?:;\s*|\s+)(\b\w{2}\b(?:-\b\w{2}\b)?)\s*;|" - r"(?:\(|\[|;)\s*(\b\w{2}\b(?:-\b\w{2}\b)?)\s*(?:\]|\)|;)" - ) - - def __init__(self) -> None: - self.platforms = [(b, re.compile(a, re.I)) for a, b in self.platform_rules] - self.browsers = [ - (b, re.compile(self._browser_version_re.format(pattern=a), re.I)) - for a, b in self.browser_rules - ] - - def __call__( - self, user_agent: str - ) -> t.Tuple[t.Optional[str], t.Optional[str], t.Optional[str], t.Optional[str]]: - platform: t.Optional[str] - browser: t.Optional[str] - version: t.Optional[str] - language: t.Optional[str] - - for platform, regex in self.platforms: # noqa: B007 - match = regex.search(user_agent) - if match is not None: - break - else: - platform = None - - # Except for Trident, all browser key words come after the last ')' - last_closing_paren = 0 - if ( - not re.compile(r"trident/.+? rv:", re.I).search(user_agent) - and ")" in user_agent - and user_agent[-1] != ")" - ): - last_closing_paren = user_agent.rindex(")") - - for browser, regex in self.browsers: # noqa: B007 - match = regex.search(user_agent[last_closing_paren:]) - if match is not None: - version = match.group(1) - break - else: - browser = version = None - match = self._language_re.search(user_agent) - if match is not None: - language = match.group(1) or match.group(2) - else: - language = None - return platform, browser, version, language - - -# It wasn't public, but users might have imported it anyway, show a -# warning if a user created an instance. -class UserAgentParser(_UserAgentParser): - """A simple user agent parser. Used by the `UserAgent`. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Use a dedicated parser library - instead. - """ - - def __init__(self) -> None: - warnings.warn( - "'UserAgentParser' is deprecated and will be removed in" - " Werkzeug 2.1. Use a dedicated parser library instead.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__() - - -class _deprecated_property(property): - def __init__(self, fget: t.Callable[["_UserAgent"], t.Any]) -> None: - super().__init__(fget) - self.message = ( - "The built-in user agent parser is deprecated and will be" - f" removed in Werkzeug 2.1. The {fget.__name__!r} property" - " will be 'None'. Subclass 'werkzeug.user_agent.UserAgent'" - " and set 'Request.user_agent_class' to use a different" - " parser." - ) - - def __get__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: - warnings.warn(self.message, DeprecationWarning, stacklevel=3) - return super().__get__(*args, **kwargs) - - -# This is what Request.user_agent returns for now, only show warnings on -# attribute access, not creation. -class _UserAgent(_BaseUserAgent): - _parser = _UserAgentParser() - - def __init__(self, string: str) -> None: - super().__init__(string) - info = self._parser(string) - self._platform, self._browser, self._version, self._language = info - - @_deprecated_property - def platform(self) -> t.Optional[str]: # type: ignore - return self._platform - - @_deprecated_property - def browser(self) -> t.Optional[str]: # type: ignore - return self._browser - - @_deprecated_property - def version(self) -> t.Optional[str]: # type: ignore - return self._version - - @_deprecated_property - def language(self) -> t.Optional[str]: # type: ignore - return self._language - - -# This is what users might be importing, show warnings on create. -class UserAgent(_UserAgent): - """Represents a parsed user agent header value. 
- - This uses a basic parser to try to extract some information from the - header. - - :param environ_or_string: The header value to parse, or a WSGI - environ containing the header. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Subclass - :class:`werkzeug.user_agent.UserAgent` (note the new module - name) to use a dedicated parser instead. - - .. versionchanged:: 2.0 - Passing a WSGI environ is deprecated and will be removed in 2.1. - """ - - def __init__(self, environ_or_string: "t.Union[str, WSGIEnvironment]") -> None: - if isinstance(environ_or_string, dict): - warnings.warn( - "Passing an environ to 'UserAgent' is deprecated and" - " will be removed in Werkzeug 2.1. Pass the header" - " value string instead.", - DeprecationWarning, - stacklevel=2, - ) - string = environ_or_string.get("HTTP_USER_AGENT", "") - else: - string = environ_or_string - - warnings.warn( - "The 'werkzeug.useragents' module is deprecated and will be" - " removed in Werkzeug 2.1. The new base API is" - " 'werkzeug.user_agent.UserAgent'.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(string) diff --git a/venv/lib/python3.8/site-packages/werkzeug/utils.py b/venv/lib/python3.8/site-packages/werkzeug/utils.py deleted file mode 100644 index 9007231..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/utils.py +++ /dev/null @@ -1,1099 +0,0 @@ -import codecs -import io -import mimetypes -import os -import pkgutil -import re -import sys -import typing as t -import unicodedata -import warnings -from datetime import datetime -from html.entities import name2codepoint -from time import time -from zlib import adler32 - -from ._internal import _DictAccessorProperty -from ._internal import _missing -from ._internal import _parse_signature -from ._internal import _TAccessorValue -from .datastructures import Headers -from .exceptions import NotFound -from .exceptions import RequestedRangeNotSatisfiable -from .security import safe_join -from .urls import url_quote -from .wsgi import wrap_file - -if t.TYPE_CHECKING: - from _typeshed.wsgi import WSGIEnvironment - from .wrappers.request import Request - from .wrappers.response import Response - -_T = t.TypeVar("_T") - -_entity_re = re.compile(r"&([^;]+);") -_filename_ascii_strip_re = re.compile(r"[^A-Za-z0-9_.-]") -_windows_device_files = ( - "CON", - "AUX", - "COM1", - "COM2", - "COM3", - "COM4", - "LPT1", - "LPT2", - "LPT3", - "PRN", - "NUL", -) - - -class cached_property(property, t.Generic[_T]): - """A :func:`property` that is only evaluated once. Subsequent access - returns the cached value. Setting the property sets the cached - value. Deleting the property clears the cached value, accessing it - again will evaluate it again. - - .. code-block:: python - - class Example: - @cached_property - def value(self): - # calculate something important here - return 42 - - e = Example() - e.value # evaluates - e.value # uses cache - e.value = 16 # sets cache - del e.value # clears cache - - The class must have a ``__dict__`` for this to work. - - .. versionchanged:: 2.0 - ``del obj.name`` clears the cached value. 
- """ - - def __init__( - self, - fget: t.Callable[[t.Any], _T], - name: t.Optional[str] = None, - doc: t.Optional[str] = None, - ) -> None: - super().__init__(fget, doc=doc) - self.__name__ = name or fget.__name__ - self.__module__ = fget.__module__ - - def __set__(self, obj: object, value: _T) -> None: - obj.__dict__[self.__name__] = value - - def __get__(self, obj: object, type: type = None) -> _T: # type: ignore - if obj is None: - return self # type: ignore - - value: _T = obj.__dict__.get(self.__name__, _missing) - - if value is _missing: - value = self.fget(obj) # type: ignore - obj.__dict__[self.__name__] = value - - return value - - def __delete__(self, obj: object) -> None: - del obj.__dict__[self.__name__] - - -def invalidate_cached_property(obj: object, name: str) -> None: - """Invalidates the cache for a :class:`cached_property`: - - >>> class Test(object): - ... @cached_property - ... def magic_number(self): - ... print("recalculating...") - ... return 42 - ... - >>> var = Test() - >>> var.magic_number - recalculating... - 42 - >>> var.magic_number - 42 - >>> invalidate_cached_property(var, "magic_number") - >>> var.magic_number - recalculating... - 42 - - You must pass the name of the cached property as the second argument. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Use ``del obj.name`` instead. - """ - warnings.warn( - "'invalidate_cached_property' is deprecated and will be removed" - " in Werkzeug 2.1. Use 'del obj.name' instead.", - DeprecationWarning, - stacklevel=2, - ) - delattr(obj, name) - - -class environ_property(_DictAccessorProperty[_TAccessorValue]): - """Maps request attributes to environment variables. This works not only - for the Werkzeug request object, but also any other class with an - environ attribute: - - >>> class Test(object): - ... environ = {'key': 'value'} - ... test = environ_property('key') - >>> var = Test() - >>> var.test - 'value' - - If you pass it a second value it's used as default if the key does not - exist, the third one can be a converter that takes a value and converts - it. If it raises :exc:`ValueError` or :exc:`TypeError` the default value - is used. If no default value is provided `None` is used. - - Per default the property is read only. You have to explicitly enable it - by passing ``read_only=False`` to the constructor. - """ - - read_only = True - - def lookup(self, obj: "Request") -> "WSGIEnvironment": - return obj.environ - - -class header_property(_DictAccessorProperty[_TAccessorValue]): - """Like `environ_property` but for headers.""" - - def lookup(self, obj: t.Union["Request", "Response"]) -> Headers: - return obj.headers - - -class HTMLBuilder: - """Helper object for HTML generation. - - Per default there are two instances of that class. The `html` one, and - the `xhtml` one for those two dialects. The class uses keyword parameters - and positional parameters to generate small snippets of HTML. - - Keyword parameters are converted to XML/SGML attributes, positional - arguments are used as children. Because Python accepts positional - arguments before keyword arguments it's a good idea to use a list with the - star-syntax for some children: - - >>> html.p(class_='foo', *[html.a('foo', href='foo.html'), ' ', - ... html.a('bar', href='bar.html')]) - '

    <p class="foo"><a href="foo.html">foo</a> <a href="bar.html">bar</a></p>

    ' - This class works around some browser limitations and can not be used for - arbitrary SGML/XML generation. For that purpose lxml and similar - libraries exist. - Calling the builder escapes the string passed: - >>> html.p(html("<foo>")) - '

    <p>&lt;foo&gt;</p>

    ' - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. - """ - - _entity_re = re.compile(r"&([^;]+);") - _entities = name2codepoint.copy() - _entities["apos"] = 39 - _empty_elements = { - "area", - "base", - "basefont", - "br", - "col", - "command", - "embed", - "frame", - "hr", - "img", - "input", - "keygen", - "isindex", - "link", - "meta", - "param", - "source", - "wbr", - } - _boolean_attributes = { - "selected", - "checked", - "compact", - "declare", - "defer", - "disabled", - "ismap", - "multiple", - "nohref", - "noresize", - "noshade", - "nowrap", - } - _plaintext_elements = {"textarea"} - _c_like_cdata = {"script", "style"} - - def __init__(self, dialect): # type: ignore - self._dialect = dialect - - def __call__(self, s): # type: ignore - import html - - warnings.warn( - "'utils.HTMLBuilder' is deprecated and will be removed in Werkzeug 2.1.", - DeprecationWarning, - stacklevel=2, - ) - return html.escape(s) - - def __getattr__(self, tag): # type: ignore - import html - - warnings.warn( - "'utils.HTMLBuilder' is deprecated and will be removed in Werkzeug 2.1.", - DeprecationWarning, - stacklevel=2, - ) - if tag[:2] == "__": - raise AttributeError(tag) - - def proxy(*children, **arguments): # type: ignore - buffer = f"<{tag}" - for key, value in arguments.items(): - if value is None: - continue - if key[-1] == "_": - key = key[:-1] - if key in self._boolean_attributes: - if not value: - continue - if self._dialect == "xhtml": - value = f'="{key}"' - else: - value = "" - else: - value = f'="{html.escape(value)}"' - buffer += f" {key}{value}" - if not children and tag in self._empty_elements: - if self._dialect == "xhtml": - buffer += " />" - else: - buffer += ">" - return buffer - buffer += ">" - - children_as_string = "".join([str(x) for x in children if x is not None]) - - if children_as_string: - if tag in self._plaintext_elements: - children_as_string = html.escape(children_as_string) - elif tag in self._c_like_cdata and self._dialect == "xhtml": - children_as_string = f"/**/" - buffer += children_as_string + f"" - return buffer - - return proxy - - def __repr__(self) -> str: - return f"<{type(self).__name__} for {self._dialect!r}>" - - -html = HTMLBuilder("html") -xhtml = HTMLBuilder("xhtml") - -# https://cgit.freedesktop.org/xdg/shared-mime-info/tree/freedesktop.org.xml.in -# https://www.iana.org/assignments/media-types/media-types.xhtml -# Types listed in the XDG mime info that have a charset in the IANA registration. -_charset_mimetypes = { - "application/ecmascript", - "application/javascript", - "application/sql", - "application/xml", - "application/xml-dtd", - "application/xml-external-parsed-entity", -} - - -def get_content_type(mimetype: str, charset: str) -> str: - """Returns the full content type string with charset for a mimetype. - - If the mimetype represents text, the charset parameter will be - appended, otherwise the mimetype is returned unchanged. - - :param mimetype: The mimetype to be used as content type. - :param charset: The charset to be appended for text mimetypes. - :return: The content type. - - .. versionchanged:: 0.15 - Any type that ends with ``+xml`` gets a charset, not just those - that start with ``application/``. Known text types such as - ``application/javascript`` are also given charsets. 
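A quick sketch of get_content_type matching the docstring above (assumes a Werkzeug 2.0.x install; the assertions only restate what the implementation that follows does):

from werkzeug.utils import get_content_type

# text/* types and the registered script/XML types get the charset appended ...
assert get_content_type("text/html", "utf-8") == "text/html; charset=utf-8"
# ... while other types pass through unchanged.
assert get_content_type("image/png", "utf-8") == "image/png"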
- """ - if ( - mimetype.startswith("text/") - or mimetype in _charset_mimetypes - or mimetype.endswith("+xml") - ): - mimetype += f"; charset={charset}" - - return mimetype - - -def detect_utf_encoding(data: bytes) -> str: - """Detect which UTF encoding was used to encode the given bytes. - - The latest JSON standard (:rfc:`8259`) suggests that only UTF-8 is - accepted. Older documents allowed 8, 16, or 32. 16 and 32 can be big - or little endian. Some editors or libraries may prepend a BOM. - - :internal: - - :param data: Bytes in unknown UTF encoding. - :return: UTF encoding name - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. This is built in to - :func:`json.loads`. - - .. versionadded:: 0.15 - """ - warnings.warn( - "'detect_utf_encoding' is deprecated and will be removed in" - " Werkzeug 2.1. This is built in to 'json.loads'.", - DeprecationWarning, - stacklevel=2, - ) - head = data[:4] - - if head[:3] == codecs.BOM_UTF8: - return "utf-8-sig" - - if b"\x00" not in head: - return "utf-8" - - if head in (codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE): - return "utf-32" - - if head[:2] in (codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE): - return "utf-16" - - if len(head) == 4: - if head[:3] == b"\x00\x00\x00": - return "utf-32-be" - - if head[::2] == b"\x00\x00": - return "utf-16-be" - - if head[1:] == b"\x00\x00\x00": - return "utf-32-le" - - if head[1::2] == b"\x00\x00": - return "utf-16-le" - - if len(head) == 2: - return "utf-16-be" if head.startswith(b"\x00") else "utf-16-le" - - return "utf-8" - - -def format_string(string: str, context: t.Mapping[str, t.Any]) -> str: - """String-template format a string: - - >>> format_string('$foo and ${foo}s', dict(foo=42)) - '42 and 42s' - - This does not do any attribute lookup. - - :param string: the format string. - :param context: a dict with the variables to insert. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Use :class:`string.Template` - instead. - """ - from string import Template - - warnings.warn( - "'utils.format_string' is deprecated and will be removed in" - " Werkzeug 2.1. Use 'string.Template' instead.", - DeprecationWarning, - stacklevel=2, - ) - return Template(string).substitute(context) - - -def secure_filename(filename: str) -> str: - r"""Pass it a filename and it will return a secure version of it. This - filename can then safely be stored on a regular file system and passed - to :func:`os.path.join`. The filename returned is an ASCII only string - for maximum portability. - - On windows systems the function also makes sure that the file is not - named after one of the special device files. - - >>> secure_filename("My cool movie.mov") - 'My_cool_movie.mov' - >>> secure_filename("../../../etc/passwd") - 'etc_passwd' - >>> secure_filename('i contain cool \xfcml\xe4uts.txt') - 'i_contain_cool_umlauts.txt' - - The function might return an empty filename. It's your responsibility - to ensure that the filename is unique and that you abort or - generate a random filename if the function returned an empty one. - - .. versionadded:: 0.5 - - :param filename: the filename to secure - """ - filename = unicodedata.normalize("NFKD", filename) - filename = filename.encode("ascii", "ignore").decode("ascii") - - for sep in os.path.sep, os.path.altsep: - if sep: - filename = filename.replace(sep, " ") - filename = str(_filename_ascii_strip_re.sub("", "_".join(filename.split()))).strip( - "._" - ) - - # on nt a couple of special files are present in each folder. 
We - # have to ensure that the target file is not such a filename. In - # this case we prepend an underline - if ( - os.name == "nt" - and filename - and filename.split(".")[0].upper() in _windows_device_files - ): - filename = f"_{filename}" - - return filename - - -def escape(s: t.Any) -> str: - """Replace ``&``, ``<``, ``>``, ``"``, and ``'`` with HTML-safe - sequences. - - ``None`` is escaped to an empty string. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Use MarkupSafe instead. - """ - import html - - warnings.warn( - "'utils.escape' is deprecated and will be removed in Werkzeug" - " 2.1. Use MarkupSafe instead.", - DeprecationWarning, - stacklevel=2, - ) - - if s is None: - return "" - - if hasattr(s, "__html__"): - return s.__html__() # type: ignore - - if not isinstance(s, str): - s = str(s) - - return html.escape(s, quote=True) # type: ignore - - -def unescape(s: str) -> str: - """The reverse of :func:`escape`. This unescapes all the HTML - entities, not only those inserted by ``escape``. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Use MarkupSafe instead. - """ - import html - - warnings.warn( - "'utils.unescape' is deprecated and will be removed in Werkzueg" - " 2.1. Use MarkupSafe instead.", - DeprecationWarning, - stacklevel=2, - ) - return html.unescape(s) - - -def redirect( - location: str, code: int = 302, Response: t.Optional[t.Type["Response"]] = None -) -> "Response": - """Returns a response object (a WSGI application) that, if called, - redirects the client to the target location. Supported codes are - 301, 302, 303, 305, 307, and 308. 300 is not supported because - it's not a real redirect and 304 because it's the answer for a - request with a request with defined If-Modified-Since headers. - - .. versionadded:: 0.6 - The location can now be a unicode string that is encoded using - the :func:`iri_to_uri` function. - - .. versionadded:: 0.10 - The class used for the Response object can now be passed in. - - :param location: the location the response should redirect to. - :param code: the redirect status code. defaults to 302. - :param class Response: a Response class to use when instantiating a - response. The default is :class:`werkzeug.wrappers.Response` if - unspecified. - """ - import html - - if Response is None: - from .wrappers import Response # type: ignore - - display_location = html.escape(location) - if isinstance(location, str): - # Safe conversion is necessary here as we might redirect - # to a broken URI scheme (for instance itms-services). - from .urls import iri_to_uri - - location = iri_to_uri(location, safe_conversion=True) - response = Response( # type: ignore - '\n' - "Redirecting...\n" - "

    <h1>Redirecting...</h1>\n" - "<p>

    You should be redirected automatically to target URL: " - f'{display_location}. If' - " not click the link.", - code, - mimetype="text/html", - ) - response.headers["Location"] = location - return response - - -def append_slash_redirect(environ: "WSGIEnvironment", code: int = 301) -> "Response": - """Redirects to the same URL but with a slash appended. The behavior - of this function is undefined if the path ends with a slash already. - - :param environ: the WSGI environment for the request that triggers - the redirect. - :param code: the status code for the redirect. - """ - new_path = environ["PATH_INFO"].strip("/") + "/" - query_string = environ.get("QUERY_STRING") - if query_string: - new_path += f"?{query_string}" - return redirect(new_path, code) - - -def send_file( - path_or_file: t.Union[os.PathLike, str, t.IO[bytes]], - environ: "WSGIEnvironment", - mimetype: t.Optional[str] = None, - as_attachment: bool = False, - download_name: t.Optional[str] = None, - conditional: bool = True, - etag: t.Union[bool, str] = True, - last_modified: t.Optional[t.Union[datetime, int, float]] = None, - max_age: t.Optional[ - t.Union[int, t.Callable[[t.Optional[str]], t.Optional[int]]] - ] = None, - use_x_sendfile: bool = False, - response_class: t.Optional[t.Type["Response"]] = None, - _root_path: t.Optional[t.Union[os.PathLike, str]] = None, -) -> "Response": - """Send the contents of a file to the client. - - The first argument can be a file path or a file-like object. Paths - are preferred in most cases because Werkzeug can manage the file and - get extra information from the path. Passing a file-like object - requires that the file is opened in binary mode, and is mostly - useful when building a file in memory with :class:`io.BytesIO`. - - Never pass file paths provided by a user. The path is assumed to be - trusted, so a user could craft a path to access a file you didn't - intend. - - If the WSGI server sets a ``file_wrapper`` in ``environ``, it is - used, otherwise Werkzeug's built-in wrapper is used. Alternatively, - if the HTTP server supports ``X-Sendfile``, ``use_x_sendfile=True`` - will tell the server to send the given path, which is much more - efficient than reading it in Python. - - :param path_or_file: The path to the file to send, relative to the - current working directory if a relative path is given. - Alternatively, a file-like object opened in binary mode. Make - sure the file pointer is seeked to the start of the data. - :param environ: The WSGI environ for the current request. - :param mimetype: The MIME type to send for the file. If not - provided, it will try to detect it from the file name. - :param as_attachment: Indicate to a browser that it should offer to - save the file instead of displaying it. - :param download_name: The default name browsers will use when saving - the file. Defaults to the passed file name. - :param conditional: Enable conditional and range responses based on - request headers. Requires passing a file path and ``environ``. - :param etag: Calculate an ETag for the file, which requires passing - a file path. Can also be a string to use instead. - :param last_modified: The last modified time to send for the file, - in seconds. If not provided, it will try to detect it from the - file path. - :param max_age: How long the client should cache the file, in - seconds. If set, ``Cache-Control`` will be ``public``, otherwise - it will be ``no-cache`` to prefer conditional caching. 
- :param use_x_sendfile: Set the ``X-Sendfile`` header to let the - server to efficiently send the file. Requires support from the - HTTP server. Requires passing a file path. - :param response_class: Build the response using this class. Defaults - to :class:`~werkzeug.wrappers.Response`. - :param _root_path: Do not use. For internal use only. Use - :func:`send_from_directory` to safely send files under a path. - - .. versionchanged:: 2.0.2 - ``send_file`` only sets a detected ``Content-Encoding`` if - ``as_attachment`` is disabled. - - .. versionadded:: 2.0 - Adapted from Flask's implementation. - - .. versionchanged:: 2.0 - ``download_name`` replaces Flask's ``attachment_filename`` - parameter. If ``as_attachment=False``, it is passed with - ``Content-Disposition: inline`` instead. - - .. versionchanged:: 2.0 - ``max_age`` replaces Flask's ``cache_timeout`` parameter. - ``conditional`` is enabled and ``max_age`` is not set by - default. - - .. versionchanged:: 2.0 - ``etag`` replaces Flask's ``add_etags`` parameter. It can be a - string to use instead of generating one. - - .. versionchanged:: 2.0 - If an encoding is returned when guessing ``mimetype`` from - ``download_name``, set the ``Content-Encoding`` header. - """ - if response_class is None: - from .wrappers import Response - - response_class = Response - - path: t.Optional[str] = None - file: t.Optional[t.IO[bytes]] = None - size: t.Optional[int] = None - mtime: t.Optional[float] = None - headers = Headers() - - if isinstance(path_or_file, (os.PathLike, str)) or hasattr( - path_or_file, "__fspath__" - ): - path_or_file = t.cast(t.Union[os.PathLike, str], path_or_file) - - # Flask will pass app.root_path, allowing its send_file wrapper - # to not have to deal with paths. - if _root_path is not None: - path = os.path.join(_root_path, path_or_file) - else: - path = os.path.abspath(path_or_file) - - stat = os.stat(path) - size = stat.st_size - mtime = stat.st_mtime - else: - file = path_or_file - - if download_name is None and path is not None: - download_name = os.path.basename(path) - - if mimetype is None: - if download_name is None: - raise TypeError( - "Unable to detect the MIME type because a file name is" - " not available. Either set 'download_name', pass a" - " path instead of a file, or set 'mimetype'." - ) - - mimetype, encoding = mimetypes.guess_type(download_name) - - if mimetype is None: - mimetype = "application/octet-stream" - - # Don't send encoding for attachments, it causes browsers to - # save decompress tar.gz files. - if encoding is not None and not as_attachment: - headers.set("Content-Encoding", encoding) - - if download_name is not None: - try: - download_name.encode("ascii") - except UnicodeEncodeError: - simple = unicodedata.normalize("NFKD", download_name) - simple = simple.encode("ascii", "ignore").decode("ascii") - quoted = url_quote(download_name, safe="") - names = {"filename": simple, "filename*": f"UTF-8''{quoted}"} - else: - names = {"filename": download_name} - - value = "attachment" if as_attachment else "inline" - headers.set("Content-Disposition", value, **names) - elif as_attachment: - raise TypeError( - "No name provided for attachment. Either set" - " 'download_name' or pass a path instead of a file." 
- ) - - if use_x_sendfile and path is not None: - headers["X-Sendfile"] = path - data = None - else: - if file is None: - file = open(path, "rb") # type: ignore - elif isinstance(file, io.BytesIO): - size = file.getbuffer().nbytes - elif isinstance(file, io.TextIOBase): - raise ValueError("Files must be opened in binary mode or use BytesIO.") - - data = wrap_file(environ, file) - - rv = response_class( - data, mimetype=mimetype, headers=headers, direct_passthrough=True - ) - - if size is not None: - rv.content_length = size - - if last_modified is not None: - rv.last_modified = last_modified # type: ignore - elif mtime is not None: - rv.last_modified = mtime # type: ignore - - rv.cache_control.no_cache = True - - # Flask will pass app.get_send_file_max_age, allowing its send_file - # wrapper to not have to deal with paths. - if callable(max_age): - max_age = max_age(path) - - if max_age is not None: - if max_age > 0: - rv.cache_control.no_cache = None - rv.cache_control.public = True - - rv.cache_control.max_age = max_age - rv.expires = int(time() + max_age) # type: ignore - - if isinstance(etag, str): - rv.set_etag(etag) - elif etag and path is not None: - check = adler32(path.encode("utf-8")) & 0xFFFFFFFF - rv.set_etag(f"{mtime}-{size}-{check}") - - if conditional: - try: - rv = rv.make_conditional(environ, accept_ranges=True, complete_length=size) - except RequestedRangeNotSatisfiable: - if file is not None: - file.close() - - raise - - # Some x-sendfile implementations incorrectly ignore the 304 - # status code and send the file anyway. - if rv.status_code == 304: - rv.headers.pop("x-sendfile", None) - - return rv - - -def send_from_directory( - directory: t.Union[os.PathLike, str], - path: t.Union[os.PathLike, str], - environ: "WSGIEnvironment", - **kwargs: t.Any, -) -> "Response": - """Send a file from within a directory using :func:`send_file`. - - This is a secure way to serve files from a folder, such as static - files or uploads. Uses :func:`~werkzeug.security.safe_join` to - ensure the path coming from the client is not maliciously crafted to - point outside the specified directory. - - If the final path does not point to an existing regular file, - returns a 404 :exc:`~werkzeug.exceptions.NotFound` error. - - :param directory: The directory that ``path`` must be located under. - :param path: The path to the file to send, relative to - ``directory``. - :param environ: The WSGI environ for the current request. - :param kwargs: Arguments to pass to :func:`send_file`. - - .. versionadded:: 2.0 - Adapted from Flask's implementation. - """ - path = safe_join(os.fspath(directory), os.fspath(path)) - - if path is None: - raise NotFound() - - # Flask will pass app.root_path, allowing its send_from_directory - # wrapper to not have to deal with paths. - if "_root_path" in kwargs: - path = os.path.join(kwargs["_root_path"], path) - - try: - if not os.path.isfile(path): - raise NotFound() - except ValueError: - # path contains null byte on Python < 3.8 - raise NotFound() from None - - return send_file(path, environ, **kwargs) - - -def import_string(import_name: str, silent: bool = False) -> t.Any: - """Imports an object based on a string. This is useful if you want to - use import paths as endpoints or something similar. An import path can - be specified either in dotted notation (``xml.sax.saxutils.escape``) - or with a colon as object delimiter (``xml.sax.saxutils:escape``). - - If `silent` is True the return value will be `None` if the import fails. 
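For reference, a hedged sketch of serving files with send_from_directory as removed above (STATIC_DIR, static_app, and the query parameter name are illustrative, not part of the patch; assumes a Werkzeug 2.0.x install):

import os
from werkzeug.exceptions import NotFound
from werkzeug.utils import send_from_directory
from werkzeug.wrappers import Request

STATIC_DIR = os.path.join(os.path.dirname(__file__), "static")  # hypothetical directory

def static_app(environ, start_response):
    request = Request(environ)
    filename = request.args.get("file", "index.html")
    try:
        # safe_join() inside send_from_directory rejects paths escaping STATIC_DIR;
        # max_age is forwarded to send_file and controls Cache-Control.
        response = send_from_directory(STATIC_DIR, filename, environ, max_age=3600)
    except NotFound as e:
        response = e.get_response(environ)
    return response(environ, start_response)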
- - :param import_name: the dotted name for the object to import. - :param silent: if set to `True` import errors are ignored and - `None` is returned instead. - :return: imported object - """ - import_name = import_name.replace(":", ".") - try: - try: - __import__(import_name) - except ImportError: - if "." not in import_name: - raise - else: - return sys.modules[import_name] - - module_name, obj_name = import_name.rsplit(".", 1) - module = __import__(module_name, globals(), locals(), [obj_name]) - try: - return getattr(module, obj_name) - except AttributeError as e: - raise ImportError(e) from None - - except ImportError as e: - if not silent: - raise ImportStringError(import_name, e).with_traceback( - sys.exc_info()[2] - ) from None - - return None - - -def find_modules( - import_path: str, include_packages: bool = False, recursive: bool = False -) -> t.Iterator[str]: - """Finds all the modules below a package. This can be useful to - automatically import all views / controllers so that their metaclasses / - function decorators have a chance to register themselves on the - application. - - Packages are not returned unless `include_packages` is `True`. This can - also recursively list modules but in that case it will import all the - packages to get the correct load path of that module. - - :param import_path: the dotted name for the package to find child modules. - :param include_packages: set to `True` if packages should be returned, too. - :param recursive: set to `True` if recursion should happen. - :return: generator - """ - module = import_string(import_path) - path = getattr(module, "__path__", None) - if path is None: - raise ValueError(f"{import_path!r} is not a package") - basename = f"{module.__name__}." - for _importer, modname, ispkg in pkgutil.iter_modules(path): - modname = basename + modname - if ispkg: - if include_packages: - yield modname - if recursive: - yield from find_modules(modname, include_packages, True) - else: - yield modname - - -def validate_arguments(func, args, kwargs, drop_extra=True): # type: ignore - """Checks if the function accepts the arguments and keyword arguments. - Returns a new ``(args, kwargs)`` tuple that can safely be passed to - the function without causing a `TypeError` because the function signature - is incompatible. If `drop_extra` is set to `True` (which is the default) - any extra positional or keyword arguments are dropped automatically. - - The exception raised provides three attributes: - - `missing` - A set of argument names that the function expected but where - missing. - - `extra` - A dict of keyword arguments that the function can not handle but - where provided. - - `extra_positional` - A list of values that where given by positional argument but the - function cannot accept. - - This can be useful for decorators that forward user submitted data to - a view function:: - - from werkzeug.utils import ArgumentValidationError, validate_arguments - - def sanitize(f): - def proxy(request): - data = request.values.to_dict() - try: - args, kwargs = validate_arguments(f, (request,), data) - except ArgumentValidationError: - raise BadRequest('The browser failed to transmit all ' - 'the data expected.') - return f(*args, **kwargs) - return proxy - - :param func: the function the validation is performed against. - :param args: a tuple of positional arguments. - :param kwargs: a dict of keyword arguments. - :param drop_extra: set to `False` if you don't want extra arguments - to be silently dropped. 
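A small sketch of import_string and find_modules in use (uses only the standard-library json package; silent=True behaves as documented above):

from werkzeug.utils import import_string, find_modules

# Dotted and colon notation are equivalent.
json_dumps = import_string("json:dumps")
assert json_dumps is import_string("json.dumps")

# silent=True returns None instead of raising ImportStringError.
assert import_string("json.no.such.attr", silent=True) is None

# find_modules yields the importable submodules of a package.
assert "json.decoder" in list(find_modules("json"))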
- :return: tuple in the form ``(args, kwargs)``. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Use :func:`inspect.signature` - instead. - """ - warnings.warn( - "'utils.validate_arguments' is deprecated and will be removed" - " in Werkzeug 2.1. Use 'inspect.signature' instead.", - DeprecationWarning, - stacklevel=2, - ) - parser = _parse_signature(func) - args, kwargs, missing, extra, extra_positional = parser(args, kwargs)[:5] - if missing: - raise ArgumentValidationError(tuple(missing)) - elif (extra or extra_positional) and not drop_extra: - raise ArgumentValidationError(None, extra, extra_positional) - return tuple(args), kwargs - - -def bind_arguments(func, args, kwargs): # type: ignore - """Bind the arguments provided into a dict. When passed a function, - a tuple of arguments and a dict of keyword arguments `bind_arguments` - returns a dict of names as the function would see it. This can be useful - to implement a cache decorator that uses the function arguments to build - the cache key based on the values of the arguments. - - :param func: the function the arguments should be bound for. - :param args: tuple of positional arguments. - :param kwargs: a dict of keyword arguments. - :return: a :class:`dict` of bound keyword arguments. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Use :meth:`Signature.bind` - instead. - """ - warnings.warn( - "'utils.bind_arguments' is deprecated and will be removed in" - " Werkzeug 2.1. Use 'Signature.bind' instead.", - DeprecationWarning, - stacklevel=2, - ) - ( - args, - kwargs, - missing, - extra, - extra_positional, - arg_spec, - vararg_var, - kwarg_var, - ) = _parse_signature(func)(args, kwargs) - values = {} - for (name, _has_default, _default), value in zip(arg_spec, args): - values[name] = value - if vararg_var is not None: - values[vararg_var] = tuple(extra_positional) - elif extra_positional: - raise TypeError("too many positional arguments") - if kwarg_var is not None: - multikw = set(extra) & {x[0] for x in arg_spec} - if multikw: - raise TypeError( - f"got multiple values for keyword argument {next(iter(multikw))!r}" - ) - values[kwarg_var] = extra - elif extra: - raise TypeError(f"got unexpected keyword argument {next(iter(extra))!r}") - return values - - -class ArgumentValidationError(ValueError): - """Raised if :func:`validate_arguments` fails to validate - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1 along with ``utils.bind`` and - ``validate_arguments``. - """ - - def __init__(self, missing=None, extra=None, extra_positional=None): # type: ignore - self.missing = set(missing or ()) - self.extra = extra or {} - self.extra_positional = extra_positional or [] - super().__init__( - "function arguments invalid." - f" ({len(self.missing)} missing," - f" {len(self.extra) + len(self.extra_positional)} additional)" - ) - - -class ImportStringError(ImportError): - """Provides information about a failed :func:`import_string` attempt.""" - - #: String in dotted notation that failed to be imported. - import_name: str - #: Wrapped exception. 
- exception: BaseException - - def __init__(self, import_name: str, exception: BaseException) -> None: - self.import_name = import_name - self.exception = exception - msg = import_name - name = "" - tracked = [] - for part in import_name.replace(":", ".").split("."): - name = f"{name}.{part}" if name else part - imported = import_string(name, silent=True) - if imported: - tracked.append((name, getattr(imported, "__file__", None))) - else: - track = [f"- {n!r} found in {i!r}." for n, i in tracked] - track.append(f"- {name!r} not found.") - track_str = "\n".join(track) - msg = ( - f"import_string() failed for {import_name!r}. Possible reasons" - f" are:\n\n" - "- missing __init__.py in a package;\n" - "- package or module path not included in sys.path;\n" - "- duplicated package or module name taking precedence in" - " sys.path;\n" - "- missing module, class, function or variable;\n\n" - f"Debugged import:\n\n{track_str}\n\n" - f"Original exception:\n\n{type(exception).__name__}: {exception}" - ) - break - - super().__init__(msg) - - def __repr__(self) -> str: - return f"<{type(self).__name__}({self.import_name!r}, {self.exception!r})>" diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/__init__.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/__init__.py deleted file mode 100644 index eb69a99..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/wrappers/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -from .accept import AcceptMixin -from .auth import AuthorizationMixin -from .auth import WWWAuthenticateMixin -from .base_request import BaseRequest -from .base_response import BaseResponse -from .common_descriptors import CommonRequestDescriptorsMixin -from .common_descriptors import CommonResponseDescriptorsMixin -from .etag import ETagRequestMixin -from .etag import ETagResponseMixin -from .request import PlainRequest -from .request import Request as Request -from .request import StreamOnlyMixin -from .response import Response as Response -from .response import ResponseStream -from .response import ResponseStreamMixin -from .user_agent import UserAgentMixin diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/accept.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/accept.py deleted file mode 100644 index 9605e63..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/wrappers/accept.py +++ /dev/null @@ -1,14 +0,0 @@ -import typing as t -import warnings - - -class AcceptMixin: - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'AcceptMixin' is deprecated and will be removed in" - " Werkzeug 2.1. 'Request' now includes the functionality" - " directly.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/auth.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/auth.py deleted file mode 100644 index da31b7c..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/wrappers/auth.py +++ /dev/null @@ -1,26 +0,0 @@ -import typing as t -import warnings - - -class AuthorizationMixin: - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'AuthorizationMixin' is deprecated and will be removed in" - " Werkzeug 2.1. 
'Request' now includes the functionality" - " directly.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) # type: ignore - - -class WWWAuthenticateMixin: - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'WWWAuthenticateMixin' is deprecated and will be removed" - " in Werkzeug 2.1. 'Response' now includes the" - " functionality directly.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/base_request.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/base_request.py deleted file mode 100644 index 451989f..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/wrappers/base_request.py +++ /dev/null @@ -1,36 +0,0 @@ -import typing as t -import warnings - -from .request import Request - - -class _FakeSubclassCheck(type): - def __subclasscheck__(cls, subclass: t.Type) -> bool: - warnings.warn( - "'BaseRequest' is deprecated and will be removed in" - " Werkzeug 2.1. Use 'issubclass(cls, Request)' instead.", - DeprecationWarning, - stacklevel=2, - ) - return issubclass(subclass, Request) - - def __instancecheck__(cls, instance: t.Any) -> bool: - warnings.warn( - "'BaseRequest' is deprecated and will be removed in" - " Werkzeug 2.1. Use 'isinstance(obj, Request)' instead.", - DeprecationWarning, - stacklevel=2, - ) - return isinstance(instance, Request) - - -class BaseRequest(Request, metaclass=_FakeSubclassCheck): - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'BaseRequest' is deprecated and will be removed in" - " Werkzeug 2.1. 'Request' now includes the functionality" - " directly.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/base_response.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/base_response.py deleted file mode 100644 index 3e0dc67..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/wrappers/base_response.py +++ /dev/null @@ -1,36 +0,0 @@ -import typing as t -import warnings - -from .response import Response - - -class _FakeSubclassCheck(type): - def __subclasscheck__(cls, subclass: t.Type) -> bool: - warnings.warn( - "'BaseResponse' is deprecated and will be removed in" - " Werkzeug 2.1. Use 'issubclass(cls, Response)' instead.", - DeprecationWarning, - stacklevel=2, - ) - return issubclass(subclass, Response) - - def __instancecheck__(cls, instance: t.Any) -> bool: - warnings.warn( - "'BaseResponse' is deprecated and will be removed in" - " Werkzeug 2.1. Use 'isinstance(obj, Response)' instead.", - DeprecationWarning, - stacklevel=2, - ) - return isinstance(instance, Response) - - -class BaseResponse(Response, metaclass=_FakeSubclassCheck): - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'BaseResponse' is deprecated and will be removed in" - " Werkzeug 2.1. 
'Response' now includes the functionality" - " directly.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/common_descriptors.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/common_descriptors.py deleted file mode 100644 index db87ea5..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/wrappers/common_descriptors.py +++ /dev/null @@ -1,26 +0,0 @@ -import typing as t -import warnings - - -class CommonRequestDescriptorsMixin: - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'CommonRequestDescriptorsMixin' is deprecated and will be" - " removed in Werkzeug 2.1. 'Request' now includes the" - " functionality directly.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) # type: ignore - - -class CommonResponseDescriptorsMixin: - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'CommonResponseDescriptorsMixin' is deprecated and will be" - " removed in Werkzeug 2.1. 'Response' now includes the" - " functionality directly.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/cors.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/cors.py deleted file mode 100644 index 89cf83e..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/wrappers/cors.py +++ /dev/null @@ -1,26 +0,0 @@ -import typing as t -import warnings - - -class CORSRequestMixin: - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'CORSRequestMixin' is deprecated and will be removed in" - " Werkzeug 2.1. 'Request' now includes the functionality" - " directly.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) # type: ignore - - -class CORSResponseMixin: - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'CORSResponseMixin' is deprecated and will be removed in" - " Werkzeug 2.1. 'Response' now includes the functionality" - " directly.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/etag.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/etag.py deleted file mode 100644 index 2e9015a..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/wrappers/etag.py +++ /dev/null @@ -1,26 +0,0 @@ -import typing as t -import warnings - - -class ETagRequestMixin: - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'ETagRequestMixin' is deprecated and will be removed in" - " Werkzeug 2.1. 'Request' now includes the functionality" - " directly.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) # type: ignore - - -class ETagResponseMixin: - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'ETagResponseMixin' is deprecated and will be removed in" - " Werkzeug 2.1. 
'Response' now includes the functionality" - " directly.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/json.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/json.py deleted file mode 100644 index ab6ed7b..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/wrappers/json.py +++ /dev/null @@ -1,13 +0,0 @@ -import typing as t -import warnings - - -class JSONMixin: - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'JSONMixin' is deprecated and will be removed in Werkzeug" - " 2.1. 'Request' now includes the functionality directly.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/request.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/request.py deleted file mode 100644 index 700cda0..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/wrappers/request.py +++ /dev/null @@ -1,660 +0,0 @@ -import functools -import json -import typing -import typing as t -import warnings -from io import BytesIO - -from .._internal import _wsgi_decoding_dance -from ..datastructures import CombinedMultiDict -from ..datastructures import EnvironHeaders -from ..datastructures import FileStorage -from ..datastructures import ImmutableMultiDict -from ..datastructures import iter_multi_items -from ..datastructures import MultiDict -from ..formparser import default_stream_factory -from ..formparser import FormDataParser -from ..sansio.request import Request as _SansIORequest -from ..utils import cached_property -from ..utils import environ_property -from ..wsgi import _get_server -from ..wsgi import get_input_stream -from werkzeug.exceptions import BadRequest - -if t.TYPE_CHECKING: - import typing_extensions as te - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - - -class Request(_SansIORequest): - """Represents an incoming WSGI HTTP request, with headers and body - taken from the WSGI environment. Has properties and methods for - using the functionality defined by various HTTP specs. The data in - requests object is read-only. - - Text data is assumed to use UTF-8 encoding, which should be true for - the vast majority of modern clients. Using an encoding set by the - client is unsafe in Python due to extra encodings it provides, such - as ``zip``. To change the assumed encoding, subclass and replace - :attr:`charset`. - - :param environ: The WSGI environ is generated by the WSGI server and - contains information about the server configuration and client - request. - :param populate_request: Add this request object to the WSGI environ - as ``environ['werkzeug.request']``. Can be useful when - debugging. - :param shallow: Makes reading from :attr:`stream` (and any method - that would read from it) raise a :exc:`RuntimeError`. Useful to - prevent consuming the form data in middleware, which would make - it unavailable to the final application. - - .. versionchanged:: 2.0 - Combine ``BaseRequest`` and mixins into a single ``Request`` - class. Using the old classes is deprecated and will be removed - in Werkzeug 2.1. - - .. versionchanged:: 0.5 - Read-only mode is enforced with immutable classes for all data. - """ - - #: the maximum content length. This is forwarded to the form data - #: parsing function (:func:`parse_form_data`). 
When set and the - #: :attr:`form` or :attr:`files` attribute is accessed and the - #: parsing fails because more than the specified value is transmitted - #: a :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised. - #: - #: Have a look at :doc:`/request_data` for more details. - #: - #: .. versionadded:: 0.5 - max_content_length: t.Optional[int] = None - - #: the maximum form field size. This is forwarded to the form data - #: parsing function (:func:`parse_form_data`). When set and the - #: :attr:`form` or :attr:`files` attribute is accessed and the - #: data in memory for post data is longer than the specified value a - #: :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised. - #: - #: Have a look at :doc:`/request_data` for more details. - #: - #: .. versionadded:: 0.5 - max_form_memory_size: t.Optional[int] = None - - #: The form data parser that shoud be used. Can be replaced to customize - #: the form date parsing. - form_data_parser_class: t.Type[FormDataParser] = FormDataParser - - #: Disable the :attr:`data` property to avoid reading from the input - #: stream. - #: - #: .. deprecated:: 2.0 - #: Will be removed in Werkzeug 2.1. Create the request with - #: ``shallow=True`` instead. - #: - #: .. versionadded:: 0.9 - disable_data_descriptor: t.Optional[bool] = None - - #: The WSGI environment containing HTTP headers and information from - #: the WSGI server. - environ: "WSGIEnvironment" - - #: Set when creating the request object. If ``True``, reading from - #: the request body will cause a ``RuntimeException``. Useful to - #: prevent modifying the stream from middleware. - shallow: bool - - def __init__( - self, - environ: "WSGIEnvironment", - populate_request: bool = True, - shallow: bool = False, - ) -> None: - super().__init__( - method=environ.get("REQUEST_METHOD", "GET"), - scheme=environ.get("wsgi.url_scheme", "http"), - server=_get_server(environ), - root_path=_wsgi_decoding_dance( - environ.get("SCRIPT_NAME") or "", self.charset, self.encoding_errors - ), - path=_wsgi_decoding_dance( - environ.get("PATH_INFO") or "", self.charset, self.encoding_errors - ), - query_string=environ.get("QUERY_STRING", "").encode("latin1"), - headers=EnvironHeaders(environ), - remote_addr=environ.get("REMOTE_ADDR"), - ) - self.environ = environ - - if self.disable_data_descriptor is not None: - warnings.warn( - "'disable_data_descriptor' is deprecated and will be" - " removed in Werkzeug 2.1. Create the request with" - " 'shallow=True' instead.", - DeprecationWarning, - stacklevel=2, - ) - shallow = shallow or self.disable_data_descriptor - - self.shallow = shallow - - if populate_request and not shallow: - self.environ["werkzeug.request"] = self - - @classmethod - def from_values(cls, *args: t.Any, **kwargs: t.Any) -> "Request": - """Create a new request object based on the values provided. If - environ is given missing values are filled from there. This method is - useful for small scripts when you need to simulate a request from an URL. - Do not use this method for unittesting, there is a full featured client - object (:class:`Client`) that allows to create multipart requests, - support for cookies etc. - - This accepts the same options as the - :class:`~werkzeug.test.EnvironBuilder`. - - .. versionchanged:: 0.5 - This method now accepts the same arguments as - :class:`~werkzeug.test.EnvironBuilder`. Because of this the - `environ` parameter is now called `environ_overrides`. 
- - :return: request object - """ - from ..test import EnvironBuilder - - charset = kwargs.pop("charset", cls.charset) - kwargs["charset"] = charset - builder = EnvironBuilder(*args, **kwargs) - try: - return builder.get_request(cls) - finally: - builder.close() - - @classmethod - def application( - cls, f: t.Callable[["Request"], "WSGIApplication"] - ) -> "WSGIApplication": - """Decorate a function as responder that accepts the request as - the last argument. This works like the :func:`responder` - decorator but the function is passed the request object as the - last argument and the request object will be closed - automatically:: - - @Request.application - def my_wsgi_app(request): - return Response('Hello World!') - - As of Werkzeug 0.14 HTTP exceptions are automatically caught and - converted to responses instead of failing. - - :param f: the WSGI callable to decorate - :return: a new WSGI callable - """ - #: return a callable that wraps the -2nd argument with the request - #: and calls the function with all the arguments up to that one and - #: the request. The return value is then called with the latest - #: two arguments. This makes it possible to use this decorator for - #: both standalone WSGI functions as well as bound methods and - #: partially applied functions. - from ..exceptions import HTTPException - - @functools.wraps(f) - def application(*args): # type: ignore - request = cls(args[-2]) - with request: - try: - resp = f(*args[:-2] + (request,)) - except HTTPException as e: - resp = e.get_response(args[-2]) - return resp(*args[-2:]) - - return t.cast("WSGIApplication", application) - - def _get_file_stream( - self, - total_content_length: t.Optional[int], - content_type: t.Optional[str], - filename: t.Optional[str] = None, - content_length: t.Optional[int] = None, - ) -> t.IO[bytes]: - """Called to get a stream for the file upload. - - This must provide a file-like class with `read()`, `readline()` - and `seek()` methods that is both writeable and readable. - - The default implementation returns a temporary file if the total - content length is higher than 500KB. Because many browsers do not - provide a content length for the files only the total content - length matters. - - :param total_content_length: the total content length of all the - data in the request combined. This value - is guaranteed to be there. - :param content_type: the mimetype of the uploaded file. - :param filename: the filename of the uploaded file. May be `None`. - :param content_length: the length of this file. This value is usually - not provided because webbrowsers do not provide - this value. - """ - return default_stream_factory( - total_content_length=total_content_length, - filename=filename, - content_type=content_type, - content_length=content_length, - ) - - @property - def want_form_data_parsed(self) -> bool: - """``True`` if the request method carries content. By default - this is true if a ``Content-Type`` is sent. - - .. versionadded:: 0.8 - """ - return bool(self.environ.get("CONTENT_TYPE")) - - def make_form_data_parser(self) -> FormDataParser: - """Creates the form data parser. Instantiates the - :attr:`form_data_parser_class` with some parameters. - - .. versionadded:: 0.8 - """ - return self.form_data_parser_class( - self._get_file_stream, - self.charset, - self.encoding_errors, - self.max_form_memory_size, - self.max_content_length, - self.parameter_storage_class, - ) - - def _load_form_data(self) -> None: - """Method used internally to retrieve submitted data. 
After calling - this sets `form` and `files` on the request object to multi dicts - filled with the incoming form data. As a matter of fact the input - stream will be empty afterwards. You can also call this method to - force the parsing of the form data. - - .. versionadded:: 0.8 - """ - # abort early if we have already consumed the stream - if "form" in self.__dict__: - return - - if self.want_form_data_parsed: - parser = self.make_form_data_parser() - data = parser.parse( - self._get_stream_for_parsing(), - self.mimetype, - self.content_length, - self.mimetype_params, - ) - else: - data = ( - self.stream, - self.parameter_storage_class(), - self.parameter_storage_class(), - ) - - # inject the values into the instance dict so that we bypass - # our cached_property non-data descriptor. - d = self.__dict__ - d["stream"], d["form"], d["files"] = data - - def _get_stream_for_parsing(self) -> t.IO[bytes]: - """This is the same as accessing :attr:`stream` with the difference - that if it finds cached data from calling :meth:`get_data` first it - will create a new stream out of the cached data. - - .. versionadded:: 0.9.3 - """ - cached_data = getattr(self, "_cached_data", None) - if cached_data is not None: - return BytesIO(cached_data) - return self.stream - - def close(self) -> None: - """Closes associated resources of this request object. This - closes all file handles explicitly. You can also use the request - object in a with statement which will automatically close it. - - .. versionadded:: 0.9 - """ - files = self.__dict__.get("files") - for _key, value in iter_multi_items(files or ()): - value.close() - - def __enter__(self) -> "Request": - return self - - def __exit__(self, exc_type, exc_value, tb) -> None: # type: ignore - self.close() - - @cached_property - def stream(self) -> t.IO[bytes]: - """ - If the incoming form data was not encoded with a known mimetype - the data is stored unmodified in this stream for consumption. Most - of the time it is a better idea to use :attr:`data` which will give - you that data as a string. The stream only returns the data once. - - Unlike :attr:`input_stream` this stream is properly guarded that you - can't accidentally read past the length of the input. Werkzeug will - internally always refer to this stream to read data which makes it - possible to wrap this object with a stream that does filtering. - - .. versionchanged:: 0.9 - This stream is now always available but might be consumed by the - form parser later on. Previously the stream was only set if no - parsing happened. - """ - if self.shallow: - raise RuntimeError( - "This request was created with 'shallow=True', reading" - " from the input stream is disabled." - ) - - return get_input_stream(self.environ) - - input_stream = environ_property[t.IO[bytes]]( - "wsgi.input", - doc="""The WSGI input stream. - - In general it's a bad idea to use this one because you can - easily read past the boundary. Use the :attr:`stream` - instead.""", - ) - - @cached_property - def data(self) -> bytes: - """ - Contains the incoming request data as string in case it came with - a mimetype Werkzeug does not handle. - """ - return self.get_data(parse_form_data=True) - - @typing.overload - def get_data( # type: ignore - self, - cache: bool = True, - as_text: "te.Literal[False]" = False, - parse_form_data: bool = False, - ) -> bytes: - ... - - @typing.overload - def get_data( - self, - cache: bool = True, - as_text: "te.Literal[True]" = ..., - parse_form_data: bool = False, - ) -> str: - ... 
- - def get_data( - self, cache: bool = True, as_text: bool = False, parse_form_data: bool = False - ) -> t.Union[bytes, str]: - """This reads the buffered incoming data from the client into one - bytes object. By default this is cached but that behavior can be - changed by setting `cache` to `False`. - - Usually it's a bad idea to call this method without checking the - content length first as a client could send dozens of megabytes or more - to cause memory problems on the server. - - Note that if the form data was already parsed this method will not - return anything as form data parsing does not cache the data like - this method does. To implicitly invoke form data parsing function - set `parse_form_data` to `True`. When this is done the return value - of this method will be an empty string if the form parser handles - the data. This generally is not necessary as if the whole data is - cached (which is the default) the form parser will used the cached - data to parse the form data. Please be generally aware of checking - the content length first in any case before calling this method - to avoid exhausting server memory. - - If `as_text` is set to `True` the return value will be a decoded - string. - - .. versionadded:: 0.9 - """ - rv = getattr(self, "_cached_data", None) - if rv is None: - if parse_form_data: - self._load_form_data() - rv = self.stream.read() - if cache: - self._cached_data = rv - if as_text: - rv = rv.decode(self.charset, self.encoding_errors) - return rv # type: ignore - - @cached_property - def form(self) -> "ImmutableMultiDict[str, str]": - """The form parameters. By default an - :class:`~werkzeug.datastructures.ImmutableMultiDict` - is returned from this function. This can be changed by setting - :attr:`parameter_storage_class` to a different type. This might - be necessary if the order of the form data is important. - - Please keep in mind that file uploads will not end up here, but instead - in the :attr:`files` attribute. - - .. versionchanged:: 0.9 - - Previous to Werkzeug 0.9 this would only contain form data for POST - and PUT requests. - """ - self._load_form_data() - return self.form - - @cached_property - def values(self) -> "CombinedMultiDict[str, str]": - """A :class:`werkzeug.datastructures.CombinedMultiDict` that - combines :attr:`args` and :attr:`form`. - - For GET requests, only ``args`` are present, not ``form``. - - .. versionchanged:: 2.0 - For GET requests, only ``args`` are present, not ``form``. - """ - sources = [self.args] - - if self.method != "GET": - # GET requests can have a body, and some caching proxies - # might not treat that differently than a normal GET - # request, allowing form data to "invisibly" affect the - # cache without indication in the query string / URL. - sources.append(self.form) - - args = [] - - for d in sources: - if not isinstance(d, MultiDict): - d = MultiDict(d) - - args.append(d) - - return CombinedMultiDict(args) - - @cached_property - def files(self) -> "ImmutableMultiDict[str, FileStorage]": - """:class:`~werkzeug.datastructures.MultiDict` object containing - all uploaded files. Each key in :attr:`files` is the name from the - ````. Each value in :attr:`files` is a - Werkzeug :class:`~werkzeug.datastructures.FileStorage` object. - - It basically behaves like a standard file object you know from Python, - with the difference that it also has a - :meth:`~werkzeug.datastructures.FileStorage.save` function that can - store the file on the filesystem. 
- - Note that :attr:`files` will only contain data if the request method was - POST, PUT or PATCH and the ```` that posted to the request had - ``enctype="multipart/form-data"``. It will be empty otherwise. - - See the :class:`~werkzeug.datastructures.MultiDict` / - :class:`~werkzeug.datastructures.FileStorage` documentation for - more details about the used data structure. - """ - self._load_form_data() - return self.files - - @property - def script_root(self) -> str: - """Alias for :attr:`self.root_path`. ``environ["SCRIPT_ROOT"]`` - without a trailing slash. - """ - return self.root_path - - @cached_property - def url_root(self) -> str: - """Alias for :attr:`root_url`. The URL with scheme, host, and - root path. For example, ``https://example.com/app/``. - """ - return self.root_url - - remote_user = environ_property[str]( - "REMOTE_USER", - doc="""If the server supports user authentication, and the - script is protected, this attribute contains the username the - user has authenticated as.""", - ) - is_multithread = environ_property[bool]( - "wsgi.multithread", - doc="""boolean that is `True` if the application is served by a - multithreaded WSGI server.""", - ) - is_multiprocess = environ_property[bool]( - "wsgi.multiprocess", - doc="""boolean that is `True` if the application is served by a - WSGI server that spawns multiple processes.""", - ) - is_run_once = environ_property[bool]( - "wsgi.run_once", - doc="""boolean that is `True` if the application will be - executed only once in a process lifetime. This is the case for - CGI for example, but it's not guaranteed that the execution only - happens one time.""", - ) - - # JSON - - #: A module or other object that has ``dumps`` and ``loads`` - #: functions that match the API of the built-in :mod:`json` module. - json_module = json - - @property - def json(self) -> t.Optional[t.Any]: - """The parsed JSON data if :attr:`mimetype` indicates JSON - (:mimetype:`application/json`, see :attr:`is_json`). - - Calls :meth:`get_json` with default arguments. - """ - return self.get_json() - - # Cached values for ``(silent=False, silent=True)``. Initialized - # with sentinel values. - _cached_json: t.Tuple[t.Any, t.Any] = (Ellipsis, Ellipsis) - - def get_json( - self, force: bool = False, silent: bool = False, cache: bool = True - ) -> t.Optional[t.Any]: - """Parse :attr:`data` as JSON. - - If the mimetype does not indicate JSON - (:mimetype:`application/json`, see :attr:`is_json`), this - returns ``None``. - - If parsing fails, :meth:`on_json_loading_failed` is called and - its return value is used as the return value. - - :param force: Ignore the mimetype and always try to parse JSON. - :param silent: Silence parsing errors and return ``None`` - instead. - :param cache: Store the parsed JSON to return for subsequent - calls. - """ - if cache and self._cached_json[silent] is not Ellipsis: - return self._cached_json[silent] - - if not (force or self.is_json): - return None - - data = self.get_data(cache=cache) - - try: - rv = self.json_module.loads(data) - except ValueError as e: - if silent: - rv = None - - if cache: - normal_rv, _ = self._cached_json - self._cached_json = (normal_rv, rv) - else: - rv = self.on_json_loading_failed(e) - - if cache: - _, silent_rv = self._cached_json - self._cached_json = (rv, silent_rv) - else: - if cache: - self._cached_json = (rv, rv) - - return rv - - def on_json_loading_failed(self, e: ValueError) -> t.Any: - """Called if :meth:`get_json` parsing fails and isn't silenced. 
- If this method returns a value, it is used as the return value - for :meth:`get_json`. The default implementation raises - :exc:`~werkzeug.exceptions.BadRequest`. - """ - raise BadRequest(f"Failed to decode JSON object: {e}") - - -class StreamOnlyMixin: - """Mixin to create a ``Request`` that disables the ``data``, - ``form``, and ``files`` properties. Only ``stream`` is available. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Create the request with - ``shallow=True`` instead. - - .. versionadded:: 0.9 - """ - - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'StreamOnlyMixin' is deprecated and will be removed in" - " Werkzeug 2.1. Create the request with 'shallow=True'" - " instead.", - DeprecationWarning, - stacklevel=2, - ) - kwargs["shallow"] = True - super().__init__(*args, **kwargs) # type: ignore - - -class PlainRequest(StreamOnlyMixin, Request): - """A request object without ``data``, ``form``, and ``files``. - - .. deprecated:: 2.0 - Will be removed in Werkzeug 2.1. Create the request with - ``shallow=True`` instead. - - .. versionadded:: 0.9 - """ - - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'PlainRequest' is deprecated and will be removed in" - " Werkzeug 2.1. Create the request with 'shallow=True'" - " instead.", - DeprecationWarning, - stacklevel=2, - ) - - # Don't show the DeprecationWarning for StreamOnlyMixin. - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - super().__init__(*args, **kwargs) diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/response.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/response.py deleted file mode 100644 index d365c4e..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/wrappers/response.py +++ /dev/null @@ -1,890 +0,0 @@ -import json -import typing -import typing as t -import warnings -from http import HTTPStatus - -from .._internal import _to_bytes -from ..datastructures import Headers -from ..http import remove_entity_headers -from ..sansio.response import Response as _SansIOResponse -from ..urls import iri_to_uri -from ..urls import url_join -from ..utils import cached_property -from ..wsgi import ClosingIterator -from ..wsgi import get_current_url -from werkzeug._internal import _get_environ -from werkzeug.http import generate_etag -from werkzeug.http import http_date -from werkzeug.http import is_resource_modified -from werkzeug.http import parse_etags -from werkzeug.http import parse_range_header -from werkzeug.wsgi import _RangeWrapper - -if t.TYPE_CHECKING: - import typing_extensions as te - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - - -def _warn_if_string(iterable: t.Iterable) -> None: - """Helper for the response objects to check if the iterable returned - to the WSGI server is not a string. - """ - if isinstance(iterable, str): - warnings.warn( - "Response iterable was set to a string. This will appear to" - " work but means that the server will send the data to the" - " client one character at a time. 
This is almost never" - " intended behavior, use 'response.data' to assign strings" - " to the response object.", - stacklevel=2, - ) - - -def _iter_encoded( - iterable: t.Iterable[t.Union[str, bytes]], charset: str -) -> t.Iterator[bytes]: - for item in iterable: - if isinstance(item, str): - yield item.encode(charset) - else: - yield item - - -def _clean_accept_ranges(accept_ranges: t.Union[bool, str]) -> str: - if accept_ranges is True: - return "bytes" - elif accept_ranges is False: - return "none" - elif isinstance(accept_ranges, str): - return accept_ranges - raise ValueError("Invalid accept_ranges value") - - -class Response(_SansIOResponse): - """Represents an outgoing WSGI HTTP response with body, status, and - headers. Has properties and methods for using the functionality - defined by various HTTP specs. - - The response body is flexible to support different use cases. The - simple form is passing bytes, or a string which will be encoded as - UTF-8. Passing an iterable of bytes or strings makes this a - streaming response. A generator is particularly useful for building - a CSV file in memory or using SSE (Server Sent Events). A file-like - object is also iterable, although the - :func:`~werkzeug.utils.send_file` helper should be used in that - case. - - The response object is itself a WSGI application callable. When - called (:meth:`__call__`) with ``environ`` and ``start_response``, - it will pass its status and headers to ``start_response`` then - return its body as an iterable. - - .. code-block:: python - - from werkzeug.wrappers.response import Response - - def index(): - return Response("Hello, World!") - - def application(environ, start_response): - path = environ.get("PATH_INFO") or "/" - - if path == "/": - response = index() - else: - response = Response("Not Found", status=404) - - return response(environ, start_response) - - :param response: The data for the body of the response. A string or - bytes, or tuple or list of strings or bytes, for a fixed-length - response, or any other iterable of strings or bytes for a - streaming response. Defaults to an empty body. - :param status: The status code for the response. Either an int, in - which case the default status message is added, or a string in - the form ``{code} {message}``, like ``404 Not Found``. Defaults - to 200. - :param headers: A :class:`~werkzeug.datastructures.Headers` object, - or a list of ``(key, value)`` tuples that will be converted to a - ``Headers`` object. - :param mimetype: The mime type (content type without charset or - other parameters) of the response. If the value starts with - ``text/`` (or matches some other special cases), the charset - will be added to create the ``content_type``. - :param content_type: The full content type of the response. - Overrides building the value from ``mimetype``. - :param direct_passthrough: Pass the response body directly through - as the WSGI iterable. This can be used when the body is a binary - file or other iterator of bytes, to skip some unnecessary - checks. Use :func:`~werkzeug.utils.send_file` instead of setting - this manually. - - .. versionchanged:: 2.0 - Combine ``BaseResponse`` and mixins into a single ``Response`` - class. Using the old classes is deprecated and will be removed - in Werkzeug 2.1. - - .. versionchanged:: 0.5 - The ``direct_passthrough`` parameter was added. - """ - - #: if set to `False` accessing properties on the response object will - #: not try to consume the response iterator and convert it into a list. - #: - #: .. 
versionadded:: 0.6.2 - #: - #: That attribute was previously called `implicit_seqence_conversion`. - #: (Notice the typo). If you did use this feature, you have to adapt - #: your code to the name change. - implicit_sequence_conversion = True - - #: Should this response object correct the location header to be RFC - #: conformant? This is true by default. - #: - #: .. versionadded:: 0.8 - autocorrect_location_header = True - - #: Should this response object automatically set the content-length - #: header if possible? This is true by default. - #: - #: .. versionadded:: 0.8 - automatically_set_content_length = True - - #: The response body to send as the WSGI iterable. A list of strings - #: or bytes represents a fixed-length response, any other iterable - #: is a streaming response. Strings are encoded to bytes as UTF-8. - #: - #: Do not set to a plain string or bytes, that will cause sending - #: the response to be very inefficient as it will iterate one byte - #: at a time. - response: t.Union[t.Iterable[str], t.Iterable[bytes]] - - def __init__( - self, - response: t.Optional[ - t.Union[t.Iterable[bytes], bytes, t.Iterable[str], str] - ] = None, - status: t.Optional[t.Union[int, str, HTTPStatus]] = None, - headers: t.Optional[ - t.Union[ - t.Mapping[str, t.Union[str, int, t.Iterable[t.Union[str, int]]]], - t.Iterable[t.Tuple[str, t.Union[str, int]]], - ] - ] = None, - mimetype: t.Optional[str] = None, - content_type: t.Optional[str] = None, - direct_passthrough: bool = False, - ) -> None: - super().__init__( - status=status, - headers=headers, - mimetype=mimetype, - content_type=content_type, - ) - - #: Pass the response body directly through as the WSGI iterable. - #: This can be used when the body is a binary file or other - #: iterator of bytes, to skip some unnecessary checks. Use - #: :func:`~werkzeug.utils.send_file` instead of setting this - #: manually. - self.direct_passthrough = direct_passthrough - self._on_close: t.List[t.Callable[[], t.Any]] = [] - - # we set the response after the headers so that if a class changes - # the charset attribute, the data is set in the correct charset. - if response is None: - self.response = [] - elif isinstance(response, (str, bytes, bytearray)): - self.set_data(response) - else: - self.response = response - - def call_on_close(self, func: t.Callable[[], t.Any]) -> t.Callable[[], t.Any]: - """Adds a function to the internal list of functions that should - be called as part of closing down the response. Since 0.7 this - function also returns the function that was passed so that this - can be used as a decorator. - - .. versionadded:: 0.6 - """ - self._on_close.append(func) - return func - - def __repr__(self) -> str: - if self.is_sequence: - body_info = f"{sum(map(len, self.iter_encoded()))} bytes" - else: - body_info = "streamed" if self.is_streamed else "likely-streamed" - return f"<{type(self).__name__} {body_info} [{self.status}]>" - - @classmethod - def force_type( - cls, response: "Response", environ: t.Optional["WSGIEnvironment"] = None - ) -> "Response": - """Enforce that the WSGI response is a response object of the current - type. Werkzeug will use the :class:`Response` internally in many - situations like the exceptions. If you call :meth:`get_response` on an - exception you will get back a regular :class:`Response` object, even - if you are using a custom subclass. 
- - This method can enforce a given response type, and it will also - convert arbitrary WSGI callables into response objects if an environ - is provided:: - - # convert a Werkzeug response object into an instance of the - # MyResponseClass subclass. - response = MyResponseClass.force_type(response) - - # convert any WSGI application into a response object - response = MyResponseClass.force_type(response, environ) - - This is especially useful if you want to post-process responses in - the main dispatcher and use functionality provided by your subclass. - - Keep in mind that this will modify response objects in place if - possible! - - :param response: a response object or wsgi application. - :param environ: a WSGI environment object. - :return: a response object. - """ - if not isinstance(response, Response): - if environ is None: - raise TypeError( - "cannot convert WSGI application into response" - " objects without an environ" - ) - - from ..test import run_wsgi_app - - response = Response(*run_wsgi_app(response, environ)) - - response.__class__ = cls - return response - - @classmethod - def from_app( - cls, app: "WSGIApplication", environ: "WSGIEnvironment", buffered: bool = False - ) -> "Response": - """Create a new response object from an application output. This - works best if you pass it an application that returns a generator all - the time. Sometimes applications may use the `write()` callable - returned by the `start_response` function. This tries to resolve such - edge cases automatically. But if you don't get the expected output - you should set `buffered` to `True` which enforces buffering. - - :param app: the WSGI application to execute. - :param environ: the WSGI environment to execute against. - :param buffered: set to `True` to enforce buffering. - :return: a response object. - """ - from ..test import run_wsgi_app - - return cls(*run_wsgi_app(app, environ, buffered)) - - @typing.overload - def get_data(self, as_text: "te.Literal[False]" = False) -> bytes: - ... - - @typing.overload - def get_data(self, as_text: "te.Literal[True]") -> str: - ... - - def get_data(self, as_text: bool = False) -> t.Union[bytes, str]: - """The string representation of the response body. Whenever you call - this property the response iterable is encoded and flattened. This - can lead to unwanted behavior if you stream big data. - - This behavior can be disabled by setting - :attr:`implicit_sequence_conversion` to `False`. - - If `as_text` is set to `True` the return value will be a decoded - string. - - .. versionadded:: 0.9 - """ - self._ensure_sequence() - rv = b"".join(self.iter_encoded()) - - if as_text: - return rv.decode(self.charset) - - return rv - - def set_data(self, value: t.Union[bytes, str]) -> None: - """Sets a new string as response. The value must be a string or - bytes. If a string is set it's encoded to the charset of the - response (utf-8 by default). - - .. 
versionadded:: 0.9 - """ - # if a string is set, it's encoded directly so that we - # can set the content length - if isinstance(value, str): - value = value.encode(self.charset) - else: - value = bytes(value) - self.response = [value] - if self.automatically_set_content_length: - self.headers["Content-Length"] = str(len(value)) - - data = property( - get_data, - set_data, - doc="A descriptor that calls :meth:`get_data` and :meth:`set_data`.", - ) - - def calculate_content_length(self) -> t.Optional[int]: - """Returns the content length if available or `None` otherwise.""" - try: - self._ensure_sequence() - except RuntimeError: - return None - return sum(len(x) for x in self.iter_encoded()) - - def _ensure_sequence(self, mutable: bool = False) -> None: - """This method can be called by methods that need a sequence. If - `mutable` is true, it will also ensure that the response sequence - is a standard Python list. - - .. versionadded:: 0.6 - """ - if self.is_sequence: - # if we need a mutable object, we ensure it's a list. - if mutable and not isinstance(self.response, list): - self.response = list(self.response) # type: ignore - return - if self.direct_passthrough: - raise RuntimeError( - "Attempted implicit sequence conversion but the" - " response object is in direct passthrough mode." - ) - if not self.implicit_sequence_conversion: - raise RuntimeError( - "The response object required the iterable to be a" - " sequence, but the implicit conversion was disabled." - " Call make_sequence() yourself." - ) - self.make_sequence() - - def make_sequence(self) -> None: - """Converts the response iterator in a list. By default this happens - automatically if required. If `implicit_sequence_conversion` is - disabled, this method is not automatically called and some properties - might raise exceptions. This also encodes all the items. - - .. versionadded:: 0.6 - """ - if not self.is_sequence: - # if we consume an iterable we have to ensure that the close - # method of the iterable is called if available when we tear - # down the response - close = getattr(self.response, "close", None) - self.response = list(self.iter_encoded()) - if close is not None: - self.call_on_close(close) - - def iter_encoded(self) -> t.Iterator[bytes]: - """Iter the response encoded with the encoding of the response. - If the response object is invoked as WSGI application the return - value of this method is used as application iterator unless - :attr:`direct_passthrough` was activated. - """ - if __debug__: - _warn_if_string(self.response) - # Encode in a separate function so that self.response is fetched - # early. This allows us to wrap the response with the return - # value from get_app_iter or iter_encoded. - return _iter_encoded(self.response, self.charset) - - @property - def is_streamed(self) -> bool: - """If the response is streamed (the response is not an iterable with - a length information) this property is `True`. In this case streamed - means that there is no information about the number of iterations. - This is usually `True` if a generator is passed to the response object. - - This is useful for checking before applying some sort of post - filtering that should not take place for streamed responses. - """ - try: - len(self.response) # type: ignore - except (TypeError, AttributeError): - return True - return False - - @property - def is_sequence(self) -> bool: - """If the iterator is buffered, this property will be `True`. 
A - response object will consider an iterator to be buffered if the - response attribute is a list or tuple. - - .. versionadded:: 0.6 - """ - return isinstance(self.response, (tuple, list)) - - def close(self) -> None: - """Close the wrapped response if possible. You can also use the object - in a with statement which will automatically close it. - - .. versionadded:: 0.9 - Can now be used in a with statement. - """ - if hasattr(self.response, "close"): - self.response.close() # type: ignore - for func in self._on_close: - func() - - def __enter__(self) -> "Response": - return self - - def __exit__(self, exc_type, exc_value, tb): # type: ignore - self.close() - - def freeze(self, no_etag: None = None) -> None: - """Make the response object ready to be pickled. Does the - following: - - * Buffer the response into a list, ignoring - :attr:`implicity_sequence_conversion` and - :attr:`direct_passthrough`. - * Set the ``Content-Length`` header. - * Generate an ``ETag`` header if one is not already set. - - .. versionchanged:: 2.0 - An ``ETag`` header is added, the ``no_etag`` parameter is - deprecated and will be removed in Werkzeug 2.1. - - .. versionchanged:: 0.6 - The ``Content-Length`` header is set. - """ - # Always freeze the encoded response body, ignore - # implicit_sequence_conversion and direct_passthrough. - self.response = list(self.iter_encoded()) - self.headers["Content-Length"] = str(sum(map(len, self.response))) - - if no_etag is not None: - warnings.warn( - "The 'no_etag' parameter is deprecated and will be" - " removed in Werkzeug 2.1.", - DeprecationWarning, - stacklevel=2, - ) - - self.add_etag() - - def get_wsgi_headers(self, environ: "WSGIEnvironment") -> Headers: - """This is automatically called right before the response is started - and returns headers modified for the given environment. It returns a - copy of the headers from the response with some modifications applied - if necessary. - - For example the location header (if present) is joined with the root - URL of the environment. Also the content length is automatically set - to zero here for certain status codes. - - .. versionchanged:: 0.6 - Previously that function was called `fix_headers` and modified - the response object in place. Also since 0.6, IRIs in location - and content-location headers are handled properly. - - Also starting with 0.6, Werkzeug will attempt to set the content - length if it is able to figure it out on its own. This is the - case if all the strings in the response iterable are already - encoded and the iterable is buffered. - - :param environ: the WSGI environment of the request. - :return: returns a new :class:`~werkzeug.datastructures.Headers` - object. - """ - headers = Headers(self.headers) - location: t.Optional[str] = None - content_location: t.Optional[str] = None - content_length: t.Optional[t.Union[str, int]] = None - status = self.status_code - - # iterate over the headers to find all values in one go. Because - # get_wsgi_headers is used each response that gives us a tiny - # speedup. - for key, value in headers: - ikey = key.lower() - if ikey == "location": - location = value - elif ikey == "content-location": - content_location = value - elif ikey == "content-length": - content_length = value - - # make sure the location header is an absolute URL - if location is not None: - old_location = location - if isinstance(location, str): - # Safe conversion is necessary here as we might redirect - # to a broken URI scheme (for instance itms-services). 
- location = iri_to_uri(location, safe_conversion=True) - - if self.autocorrect_location_header: - current_url = get_current_url(environ, strip_querystring=True) - if isinstance(current_url, str): - current_url = iri_to_uri(current_url) - location = url_join(current_url, location) - if location != old_location: - headers["Location"] = location - - # make sure the content location is a URL - if content_location is not None and isinstance(content_location, str): - headers["Content-Location"] = iri_to_uri(content_location) - - if 100 <= status < 200 or status == 204: - # Per section 3.3.2 of RFC 7230, "a server MUST NOT send a - # Content-Length header field in any response with a status - # code of 1xx (Informational) or 204 (No Content)." - headers.remove("Content-Length") - elif status == 304: - remove_entity_headers(headers) - - # if we can determine the content length automatically, we - # should try to do that. But only if this does not involve - # flattening the iterator or encoding of strings in the - # response. We however should not do that if we have a 304 - # response. - if ( - self.automatically_set_content_length - and self.is_sequence - and content_length is None - and status not in (204, 304) - and not (100 <= status < 200) - ): - try: - content_length = sum(len(_to_bytes(x, "ascii")) for x in self.response) - except UnicodeError: - # Something other than bytes, can't safely figure out - # the length of the response. - pass - else: - headers["Content-Length"] = str(content_length) - - return headers - - def get_app_iter(self, environ: "WSGIEnvironment") -> t.Iterable[bytes]: - """Returns the application iterator for the given environ. Depending - on the request method and the current status code the return value - might be an empty response rather than the one from the response. - - If the request method is `HEAD` or the status code is in a range - where the HTTP specification requires an empty response, an empty - iterable is returned. - - .. versionadded:: 0.6 - - :param environ: the WSGI environment of the request. - :return: a response iterable. - """ - status = self.status_code - if ( - environ["REQUEST_METHOD"] == "HEAD" - or 100 <= status < 200 - or status in (204, 304) - ): - iterable: t.Iterable[bytes] = () - elif self.direct_passthrough: - if __debug__: - _warn_if_string(self.response) - return self.response # type: ignore - else: - iterable = self.iter_encoded() - return ClosingIterator(iterable, self.close) - - def get_wsgi_response( - self, environ: "WSGIEnvironment" - ) -> t.Tuple[t.Iterable[bytes], str, t.List[t.Tuple[str, str]]]: - """Returns the final WSGI response as tuple. The first item in - the tuple is the application iterator, the second the status and - the third the list of headers. The response returned is created - specially for the given environment. For example if the request - method in the WSGI environment is ``'HEAD'`` the response will - be empty and only the headers and status code will be present. - - .. versionadded:: 0.6 - - :param environ: the WSGI environment of the request. - :return: an ``(app_iter, status, headers)`` tuple. - """ - headers = self.get_wsgi_headers(environ) - app_iter = self.get_app_iter(environ) - return app_iter, self.status, headers.to_wsgi_list() - - def __call__( - self, environ: "WSGIEnvironment", start_response: "StartResponse" - ) -> t.Iterable[bytes]: - """Process this response as WSGI application. - - :param environ: the WSGI environment. 
- :param start_response: the response callable provided by the WSGI - server. - :return: an application iterator - """ - app_iter, status, headers = self.get_wsgi_response(environ) - start_response(status, headers) - return app_iter - - # JSON - - #: A module or other object that has ``dumps`` and ``loads`` - #: functions that match the API of the built-in :mod:`json` module. - json_module = json - - @property - def json(self) -> t.Optional[t.Any]: - """The parsed JSON data if :attr:`mimetype` indicates JSON - (:mimetype:`application/json`, see :attr:`is_json`). - - Calls :meth:`get_json` with default arguments. - """ - return self.get_json() - - def get_json(self, force: bool = False, silent: bool = False) -> t.Optional[t.Any]: - """Parse :attr:`data` as JSON. Useful during testing. - - If the mimetype does not indicate JSON - (:mimetype:`application/json`, see :attr:`is_json`), this - returns ``None``. - - Unlike :meth:`Request.get_json`, the result is not cached. - - :param force: Ignore the mimetype and always try to parse JSON. - :param silent: Silence parsing errors and return ``None`` - instead. - """ - if not (force or self.is_json): - return None - - data = self.get_data() - - try: - return self.json_module.loads(data) - except ValueError: - if not silent: - raise - - return None - - # Stream - - @cached_property - def stream(self) -> "ResponseStream": - """The response iterable as write-only stream.""" - return ResponseStream(self) - - def _wrap_range_response(self, start: int, length: int) -> None: - """Wrap existing Response in case of Range Request context.""" - if self.status_code == 206: - self.response = _RangeWrapper(self.response, start, length) # type: ignore - - def _is_range_request_processable(self, environ: "WSGIEnvironment") -> bool: - """Return ``True`` if `Range` header is present and if underlying - resource is considered unchanged when compared with `If-Range` header. - """ - return ( - "HTTP_IF_RANGE" not in environ - or not is_resource_modified( - environ, - self.headers.get("etag"), - None, - self.headers.get("last-modified"), - ignore_if_range=False, - ) - ) and "HTTP_RANGE" in environ - - def _process_range_request( - self, - environ: "WSGIEnvironment", - complete_length: t.Optional[int] = None, - accept_ranges: t.Optional[t.Union[bool, str]] = None, - ) -> bool: - """Handle Range Request related headers (RFC7233). If `Accept-Ranges` - header is valid, and Range Request is processable, we set the headers - as described by the RFC, and wrap the underlying response in a - RangeWrapper. - - Returns ``True`` if Range Request can be fulfilled, ``False`` otherwise. - - :raises: :class:`~werkzeug.exceptions.RequestedRangeNotSatisfiable` - if `Range` header could not be parsed or satisfied. - - .. versionchanged:: 2.0 - Returns ``False`` if the length is 0. 
- """ - from ..exceptions import RequestedRangeNotSatisfiable - - if ( - accept_ranges is None - or complete_length is None - or complete_length == 0 - or not self._is_range_request_processable(environ) - ): - return False - - parsed_range = parse_range_header(environ.get("HTTP_RANGE")) - - if parsed_range is None: - raise RequestedRangeNotSatisfiable(complete_length) - - range_tuple = parsed_range.range_for_length(complete_length) - content_range_header = parsed_range.to_content_range_header(complete_length) - - if range_tuple is None or content_range_header is None: - raise RequestedRangeNotSatisfiable(complete_length) - - content_length = range_tuple[1] - range_tuple[0] - self.headers["Content-Length"] = content_length - self.headers["Accept-Ranges"] = accept_ranges - self.content_range = content_range_header # type: ignore - self.status_code = 206 - self._wrap_range_response(range_tuple[0], content_length) - return True - - def make_conditional( - self, - request_or_environ: "WSGIEnvironment", - accept_ranges: t.Union[bool, str] = False, - complete_length: t.Optional[int] = None, - ) -> "Response": - """Make the response conditional to the request. This method works - best if an etag was defined for the response already. The `add_etag` - method can be used to do that. If called without etag just the date - header is set. - - This does nothing if the request method in the request or environ is - anything but GET or HEAD. - - For optimal performance when handling range requests, it's recommended - that your response data object implements `seekable`, `seek` and `tell` - methods as described by :py:class:`io.IOBase`. Objects returned by - :meth:`~werkzeug.wsgi.wrap_file` automatically implement those methods. - - It does not remove the body of the response because that's something - the :meth:`__call__` function does for us automatically. - - Returns self so that you can do ``return resp.make_conditional(req)`` - but modifies the object in-place. - - :param request_or_environ: a request object or WSGI environment to be - used to make the response conditional - against. - :param accept_ranges: This parameter dictates the value of - `Accept-Ranges` header. If ``False`` (default), - the header is not set. If ``True``, it will be set - to ``"bytes"``. If ``None``, it will be set to - ``"none"``. If it's a string, it will use this - value. - :param complete_length: Will be used only in valid Range Requests. - It will set `Content-Range` complete length - value and compute `Content-Length` real value. - This parameter is mandatory for successful - Range Requests completion. - :raises: :class:`~werkzeug.exceptions.RequestedRangeNotSatisfiable` - if `Range` header could not be parsed or satisfied. - - .. versionchanged:: 2.0 - Range processing is skipped if length is 0 instead of - raising a 416 Range Not Satisfiable error. - """ - environ = _get_environ(request_or_environ) - if environ["REQUEST_METHOD"] in ("GET", "HEAD"): - # if the date is not in the headers, add it now. We however - # will not override an already existing header. Unfortunately - # this header will be overriden by many WSGI servers including - # wsgiref. 
- if "date" not in self.headers: - self.headers["Date"] = http_date() - accept_ranges = _clean_accept_ranges(accept_ranges) - is206 = self._process_range_request(environ, complete_length, accept_ranges) - if not is206 and not is_resource_modified( - environ, - self.headers.get("etag"), - None, - self.headers.get("last-modified"), - ): - if parse_etags(environ.get("HTTP_IF_MATCH")): - self.status_code = 412 - else: - self.status_code = 304 - if ( - self.automatically_set_content_length - and "content-length" not in self.headers - ): - length = self.calculate_content_length() - if length is not None: - self.headers["Content-Length"] = length - return self - - def add_etag(self, overwrite: bool = False, weak: bool = False) -> None: - """Add an etag for the current response if there is none yet. - - .. versionchanged:: 2.0 - SHA-1 is used to generate the value. MD5 may not be - available in some environments. - """ - if overwrite or "etag" not in self.headers: - self.set_etag(generate_etag(self.get_data()), weak) - - -class ResponseStream: - """A file descriptor like object used by the :class:`ResponseStreamMixin` to - represent the body of the stream. It directly pushes into the response - iterable of the response object. - """ - - mode = "wb+" - - def __init__(self, response: Response): - self.response = response - self.closed = False - - def write(self, value: bytes) -> int: - if self.closed: - raise ValueError("I/O operation on closed file") - self.response._ensure_sequence(mutable=True) - self.response.response.append(value) # type: ignore - self.response.headers.pop("Content-Length", None) - return len(value) - - def writelines(self, seq: t.Iterable[bytes]) -> None: - for item in seq: - self.write(item) - - def close(self) -> None: - self.closed = True - - def flush(self) -> None: - if self.closed: - raise ValueError("I/O operation on closed file") - - def isatty(self) -> bool: - if self.closed: - raise ValueError("I/O operation on closed file") - return False - - def tell(self) -> int: - self.response._ensure_sequence() - return sum(map(len, self.response.response)) - - @property - def encoding(self) -> str: - return self.response.charset - - -class ResponseStreamMixin: - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'ResponseStreamMixin' is deprecated and will be removed in" - " Werkzeug 2.1. 'Response' now includes the functionality" - " directly.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wrappers/user_agent.py b/venv/lib/python3.8/site-packages/werkzeug/wrappers/user_agent.py deleted file mode 100644 index 184ffd0..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/wrappers/user_agent.py +++ /dev/null @@ -1,14 +0,0 @@ -import typing as t -import warnings - - -class UserAgentMixin: - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - warnings.warn( - "'UserAgentMixin' is deprecated and will be removed in" - " Werkzeug 2.1. 
'Request' now includes the functionality" - " directly.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__(*args, **kwargs) # type: ignore diff --git a/venv/lib/python3.8/site-packages/werkzeug/wsgi.py b/venv/lib/python3.8/site-packages/werkzeug/wsgi.py deleted file mode 100644 index 9cfa74d..0000000 --- a/venv/lib/python3.8/site-packages/werkzeug/wsgi.py +++ /dev/null @@ -1,982 +0,0 @@ -import io -import re -import typing as t -from functools import partial -from functools import update_wrapper -from itertools import chain - -from ._internal import _make_encode_wrapper -from ._internal import _to_bytes -from ._internal import _to_str -from .sansio import utils as _sansio_utils -from .sansio.utils import host_is_trusted # noqa: F401 # Imported as part of API -from .urls import _URLTuple -from .urls import uri_to_iri -from .urls import url_join -from .urls import url_parse -from .urls import url_quote - -if t.TYPE_CHECKING: - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - - -def responder(f: t.Callable[..., "WSGIApplication"]) -> "WSGIApplication": - """Marks a function as responder. Decorate a function with it and it - will automatically call the return value as WSGI application. - - Example:: - - @responder - def application(environ, start_response): - return Response('Hello World!') - """ - return update_wrapper(lambda *a: f(*a)(*a[-2:]), f) - - -def get_current_url( - environ: "WSGIEnvironment", - root_only: bool = False, - strip_querystring: bool = False, - host_only: bool = False, - trusted_hosts: t.Optional[t.Iterable[str]] = None, -) -> str: - """Recreate the URL for a request from the parts in a WSGI - environment. - - The URL is an IRI, not a URI, so it may contain Unicode characters. - Use :func:`~werkzeug.urls.iri_to_uri` to convert it to ASCII. - - :param environ: The WSGI environment to get the URL parts from. - :param root_only: Only build the root path, don't include the - remaining path or query string. - :param strip_querystring: Don't include the query string. - :param host_only: Only build the scheme and host. - :param trusted_hosts: A list of trusted host names to validate the - host against. - """ - parts = { - "scheme": environ["wsgi.url_scheme"], - "host": get_host(environ, trusted_hosts), - } - - if not host_only: - parts["root_path"] = environ.get("SCRIPT_NAME", "") - - if not root_only: - parts["path"] = environ.get("PATH_INFO", "") - - if not strip_querystring: - parts["query_string"] = environ.get("QUERY_STRING", "").encode("latin1") - - return _sansio_utils.get_current_url(**parts) - - -def _get_server( - environ: "WSGIEnvironment", -) -> t.Optional[t.Tuple[str, t.Optional[int]]]: - name = environ.get("SERVER_NAME") - - if name is None: - return None - - try: - port: t.Optional[int] = int(environ.get("SERVER_PORT", None)) - except (TypeError, ValueError): - # unix socket - port = None - - return name, port - - -def get_host( - environ: "WSGIEnvironment", trusted_hosts: t.Optional[t.Iterable[str]] = None -) -> str: - """Return the host for the given WSGI environment. - - The ``Host`` header is preferred, then ``SERVER_NAME`` if it's not - set. The returned host will only contain the port if it is different - than the standard port for the protocol. - - Optionally, verify that the host is trusted using - :func:`host_is_trusted` and raise a - :exc:`~werkzeug.exceptions.SecurityError` if it is not. - - :param environ: A WSGI environment dict. - :param trusted_hosts: A list of trusted host names. 
- - :return: Host, with port if necessary. - :raise ~werkzeug.exceptions.SecurityError: If the host is not - trusted. - """ - return _sansio_utils.get_host( - environ["wsgi.url_scheme"], - environ.get("HTTP_HOST"), - _get_server(environ), - trusted_hosts, - ) - - -def get_content_length(environ: "WSGIEnvironment") -> t.Optional[int]: - """Returns the content length from the WSGI environment as - integer. If it's not available or chunked transfer encoding is used, - ``None`` is returned. - - .. versionadded:: 0.9 - - :param environ: the WSGI environ to fetch the content length from. - """ - if environ.get("HTTP_TRANSFER_ENCODING", "") == "chunked": - return None - - content_length = environ.get("CONTENT_LENGTH") - if content_length is not None: - try: - return max(0, int(content_length)) - except (ValueError, TypeError): - pass - return None - - -def get_input_stream( - environ: "WSGIEnvironment", safe_fallback: bool = True -) -> t.IO[bytes]: - """Returns the input stream from the WSGI environment and wraps it - in the most sensible way possible. The stream returned is not the - raw WSGI stream in most cases but one that is safe to read from - without taking into account the content length. - - If content length is not set, the stream will be empty for safety reasons. - If the WSGI server supports chunked or infinite streams, it should set - the ``wsgi.input_terminated`` value in the WSGI environ to indicate that. - - .. versionadded:: 0.9 - - :param environ: the WSGI environ to fetch the stream from. - :param safe_fallback: use an empty stream as a safe fallback when the - content length is not set. Disabling this allows infinite streams, - which can be a denial-of-service risk. - """ - stream = t.cast(t.IO[bytes], environ["wsgi.input"]) - content_length = get_content_length(environ) - - # A wsgi extension that tells us if the input is terminated. In - # that case we return the stream unchanged as we know we can safely - # read it until the end. - if environ.get("wsgi.input_terminated"): - return stream - - # If the request doesn't specify a content length, returning the stream is - # potentially dangerous because it could be infinite, malicious or not. If - # safe_fallback is true, return an empty stream instead for safety. - if content_length is None: - return io.BytesIO() if safe_fallback else stream - - # Otherwise limit the stream to the content length - return t.cast(t.IO[bytes], LimitedStream(stream, content_length)) - - -def get_query_string(environ: "WSGIEnvironment") -> str: - """Returns the ``QUERY_STRING`` from the WSGI environment. This also - takes care of the WSGI decoding dance. The string returned will be - restricted to ASCII characters. - - :param environ: WSGI environment to get the query string from. - - .. versionadded:: 0.9 - """ - qs = environ.get("QUERY_STRING", "").encode("latin1") - # QUERY_STRING really should be ascii safe but some browsers - # will send us some unicode stuff (I am looking at you IE). - # In that case we want to urllib quote it badly. - return url_quote(qs, safe=":&%=+$!*'(),") - - -def get_path_info( - environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace" -) -> str: - """Return the ``PATH_INFO`` from the WSGI environment and decode it - unless ``charset`` is ``None``. - - :param environ: WSGI environment to get the path from. - :param charset: The charset for the path info, or ``None`` if no - decoding should be performed. - :param errors: The decoding error handling. - - .. 
versionadded:: 0.9 - """ - path = environ.get("PATH_INFO", "").encode("latin1") - return _to_str(path, charset, errors, allow_none_charset=True) # type: ignore - - -def get_script_name( - environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace" -) -> str: - """Return the ``SCRIPT_NAME`` from the WSGI environment and decode - it unless `charset` is set to ``None``. - - :param environ: WSGI environment to get the path from. - :param charset: The charset for the path, or ``None`` if no decoding - should be performed. - :param errors: The decoding error handling. - - .. versionadded:: 0.9 - """ - path = environ.get("SCRIPT_NAME", "").encode("latin1") - return _to_str(path, charset, errors, allow_none_charset=True) # type: ignore - - -def pop_path_info( - environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace" -) -> t.Optional[str]: - """Removes and returns the next segment of `PATH_INFO`, pushing it onto - `SCRIPT_NAME`. Returns `None` if there is nothing left on `PATH_INFO`. - - If the `charset` is set to `None` bytes are returned. - - If there are empty segments (``'/foo//bar``) these are ignored but - properly pushed to the `SCRIPT_NAME`: - - >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'} - >>> pop_path_info(env) - 'a' - >>> env['SCRIPT_NAME'] - '/foo/a' - >>> pop_path_info(env) - 'b' - >>> env['SCRIPT_NAME'] - '/foo/a/b' - - .. versionadded:: 0.5 - - .. versionchanged:: 0.9 - The path is now decoded and a charset and encoding - parameter can be provided. - - :param environ: the WSGI environment that is modified. - :param charset: The ``encoding`` parameter passed to - :func:`bytes.decode`. - :param errors: The ``errors`` paramater passed to - :func:`bytes.decode`. - """ - path = environ.get("PATH_INFO") - if not path: - return None - - script_name = environ.get("SCRIPT_NAME", "") - - # shift multiple leading slashes over - old_path = path - path = path.lstrip("/") - if path != old_path: - script_name += "/" * (len(old_path) - len(path)) - - if "/" not in path: - environ["PATH_INFO"] = "" - environ["SCRIPT_NAME"] = script_name + path - rv = path.encode("latin1") - else: - segment, path = path.split("/", 1) - environ["PATH_INFO"] = f"/{path}" - environ["SCRIPT_NAME"] = script_name + segment - rv = segment.encode("latin1") - - return _to_str(rv, charset, errors, allow_none_charset=True) # type: ignore - - -def peek_path_info( - environ: "WSGIEnvironment", charset: str = "utf-8", errors: str = "replace" -) -> t.Optional[str]: - """Returns the next segment on the `PATH_INFO` or `None` if there - is none. Works like :func:`pop_path_info` without modifying the - environment: - - >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'} - >>> peek_path_info(env) - 'a' - >>> peek_path_info(env) - 'a' - - If the `charset` is set to `None` bytes are returned. - - .. versionadded:: 0.5 - - .. versionchanged:: 0.9 - The path is now decoded and a charset and encoding - parameter can be provided. - - :param environ: the WSGI environment that is checked. 
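
The doctest in the pop_path_info() docstring above already shows the shifting behaviour; spelled out as a self-contained sketch (environ values made up, not part of this patch):

    from werkzeug.wsgi import peek_path_info, pop_path_info

    env = {"SCRIPT_NAME": "/foo", "PATH_INFO": "/a/b"}

    # peek_path_info() inspects the next segment without touching the environ.
    assert peek_path_info(env) == "a"

    # pop_path_info() moves that segment from PATH_INFO onto SCRIPT_NAME.
    assert pop_path_info(env) == "a"
    assert env["SCRIPT_NAME"] == "/foo/a" and env["PATH_INFO"] == "/b"

    assert pop_path_info(env) == "b"
    assert env["SCRIPT_NAME"] == "/foo/a/b" and env["PATH_INFO"] == ""

    # Nothing left to pop.
    assert pop_path_info(env) is None
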
- """ - segments = environ.get("PATH_INFO", "").lstrip("/").split("/", 1) - if segments: - return _to_str( # type: ignore - segments[0].encode("latin1"), charset, errors, allow_none_charset=True - ) - return None - - -def extract_path_info( - environ_or_baseurl: t.Union[str, "WSGIEnvironment"], - path_or_url: t.Union[str, _URLTuple], - charset: str = "utf-8", - errors: str = "werkzeug.url_quote", - collapse_http_schemes: bool = True, -) -> t.Optional[str]: - """Extracts the path info from the given URL (or WSGI environment) and - path. The path info returned is a string. The URLs might also be IRIs. - - If the path info could not be determined, `None` is returned. - - Some examples: - - >>> extract_path_info('http://example.com/app', '/app/hello') - '/hello' - >>> extract_path_info('http://example.com/app', - ... 'https://example.com/app/hello') - '/hello' - >>> extract_path_info('http://example.com/app', - ... 'https://example.com/app/hello', - ... collapse_http_schemes=False) is None - True - - Instead of providing a base URL you can also pass a WSGI environment. - - :param environ_or_baseurl: a WSGI environment dict, a base URL or - base IRI. This is the root of the - application. - :param path_or_url: an absolute path from the server root, a - relative path (in which case it's the path info) - or a full URL. - :param charset: the charset for byte data in URLs - :param errors: the error handling on decode - :param collapse_http_schemes: if set to `False` the algorithm does - not assume that http and https on the - same server point to the same - resource. - - .. versionchanged:: 0.15 - The ``errors`` parameter defaults to leaving invalid bytes - quoted instead of replacing them. - - .. versionadded:: 0.6 - """ - - def _normalize_netloc(scheme: str, netloc: str) -> str: - parts = netloc.split("@", 1)[-1].split(":", 1) - port: t.Optional[str] - - if len(parts) == 2: - netloc, port = parts - if (scheme == "http" and port == "80") or ( - scheme == "https" and port == "443" - ): - port = None - else: - netloc = parts[0] - port = None - - if port is not None: - netloc += f":{port}" - - return netloc - - # make sure whatever we are working on is a IRI and parse it - path = uri_to_iri(path_or_url, charset, errors) - if isinstance(environ_or_baseurl, dict): - environ_or_baseurl = get_current_url(environ_or_baseurl, root_only=True) - base_iri = uri_to_iri(environ_or_baseurl, charset, errors) - base_scheme, base_netloc, base_path = url_parse(base_iri)[:3] - cur_scheme, cur_netloc, cur_path = url_parse(url_join(base_iri, path))[:3] - - # normalize the network location - base_netloc = _normalize_netloc(base_scheme, base_netloc) - cur_netloc = _normalize_netloc(cur_scheme, cur_netloc) - - # is that IRI even on a known HTTP scheme? - if collapse_http_schemes: - for scheme in base_scheme, cur_scheme: - if scheme not in ("http", "https"): - return None - else: - if not (base_scheme in ("http", "https") and base_scheme == cur_scheme): - return None - - # are the netlocs compatible? - if base_netloc != cur_netloc: - return None - - # are we below the application path? - base_path = base_path.rstrip("/") - if not cur_path.startswith(base_path): - return None - - return f"/{cur_path[len(base_path) :].lstrip('/')}" - - -class ClosingIterator: - """The WSGI specification requires that all middlewares and gateways - respect the `close` callback of the iterable returned by the application. 
- Because it is useful to add another close action to a returned iterable - and adding a custom iterable is a boring task this class can be used for - that:: - - return ClosingIterator(app(environ, start_response), [cleanup_session, - cleanup_locals]) - - If there is just one close function it can be passed instead of the list. - - A closing iterator is not needed if the application uses response objects - and finishes the processing if the response is started:: - - try: - return response(environ, start_response) - finally: - cleanup_session() - cleanup_locals() - """ - - def __init__( - self, - iterable: t.Iterable[bytes], - callbacks: t.Optional[ - t.Union[t.Callable[[], None], t.Iterable[t.Callable[[], None]]] - ] = None, - ) -> None: - iterator = iter(iterable) - self._next = t.cast(t.Callable[[], bytes], partial(next, iterator)) - if callbacks is None: - callbacks = [] - elif callable(callbacks): - callbacks = [callbacks] - else: - callbacks = list(callbacks) - iterable_close = getattr(iterable, "close", None) - if iterable_close: - callbacks.insert(0, iterable_close) - self._callbacks = callbacks - - def __iter__(self) -> "ClosingIterator": - return self - - def __next__(self) -> bytes: - return self._next() - - def close(self) -> None: - for callback in self._callbacks: - callback() - - -def wrap_file( - environ: "WSGIEnvironment", file: t.IO[bytes], buffer_size: int = 8192 -) -> t.Iterable[bytes]: - """Wraps a file. This uses the WSGI server's file wrapper if available - or otherwise the generic :class:`FileWrapper`. - - .. versionadded:: 0.5 - - If the file wrapper from the WSGI server is used it's important to not - iterate over it from inside the application but to pass it through - unchanged. If you want to pass out a file wrapper inside a response - object you have to set :attr:`Response.direct_passthrough` to `True`. - - More information about file wrappers are available in :pep:`333`. - - :param file: a :class:`file`-like object with a :meth:`~file.read` method. - :param buffer_size: number of bytes for one iteration. - """ - return environ.get("wsgi.file_wrapper", FileWrapper)( # type: ignore - file, buffer_size - ) - - -class FileWrapper: - """This class can be used to convert a :class:`file`-like object into - an iterable. It yields `buffer_size` blocks until the file is fully - read. - - You should not use this class directly but rather use the - :func:`wrap_file` function that uses the WSGI server's file wrapper - support if it's available. - - .. versionadded:: 0.5 - - If you're using this object together with a :class:`Response` you have - to use the `direct_passthrough` mode. - - :param file: a :class:`file`-like object with a :meth:`~file.read` method. - :param buffer_size: number of bytes for one iteration. 
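
A short sketch of the ClosingIterator pattern described above, with a made-up cleanup callback standing in for the cleanup_session / cleanup_locals names from the docstring:

    from werkzeug.wsgi import ClosingIterator

    log = []

    def cleanup():
        # Stands in for cleanup_session / cleanup_locals in the docstring example.
        log.append("cleaned up")

    # Wrap a response iterable so the extra callback runs when it is closed.
    wrapped = ClosingIterator(iter([b"Hello ", b"World!"]), cleanup)
    assert b"".join(wrapped) == b"Hello World!"

    wrapped.close()
    assert log == ["cleaned up"]
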
- """ - - def __init__(self, file: t.IO[bytes], buffer_size: int = 8192) -> None: - self.file = file - self.buffer_size = buffer_size - - def close(self) -> None: - if hasattr(self.file, "close"): - self.file.close() - - def seekable(self) -> bool: - if hasattr(self.file, "seekable"): - return self.file.seekable() - if hasattr(self.file, "seek"): - return True - return False - - def seek(self, *args: t.Any) -> None: - if hasattr(self.file, "seek"): - self.file.seek(*args) - - def tell(self) -> t.Optional[int]: - if hasattr(self.file, "tell"): - return self.file.tell() - return None - - def __iter__(self) -> "FileWrapper": - return self - - def __next__(self) -> bytes: - data = self.file.read(self.buffer_size) - if data: - return data - raise StopIteration() - - -class _RangeWrapper: - # private for now, but should we make it public in the future ? - - """This class can be used to convert an iterable object into - an iterable that will only yield a piece of the underlying content. - It yields blocks until the underlying stream range is fully read. - The yielded blocks will have a size that can't exceed the original - iterator defined block size, but that can be smaller. - - If you're using this object together with a :class:`Response` you have - to use the `direct_passthrough` mode. - - :param iterable: an iterable object with a :meth:`__next__` method. - :param start_byte: byte from which read will start. - :param byte_range: how many bytes to read. - """ - - def __init__( - self, - iterable: t.Union[t.Iterable[bytes], t.IO[bytes]], - start_byte: int = 0, - byte_range: t.Optional[int] = None, - ): - self.iterable = iter(iterable) - self.byte_range = byte_range - self.start_byte = start_byte - self.end_byte = None - - if byte_range is not None: - self.end_byte = start_byte + byte_range - - self.read_length = 0 - self.seekable = ( - hasattr(iterable, "seekable") and iterable.seekable() # type: ignore - ) - self.end_reached = False - - def __iter__(self) -> "_RangeWrapper": - return self - - def _next_chunk(self) -> bytes: - try: - chunk = next(self.iterable) - self.read_length += len(chunk) - return chunk - except StopIteration: - self.end_reached = True - raise - - def _first_iteration(self) -> t.Tuple[t.Optional[bytes], int]: - chunk = None - if self.seekable: - self.iterable.seek(self.start_byte) # type: ignore - self.read_length = self.iterable.tell() # type: ignore - contextual_read_length = self.read_length - else: - while self.read_length <= self.start_byte: - chunk = self._next_chunk() - if chunk is not None: - chunk = chunk[self.start_byte - self.read_length :] - contextual_read_length = self.start_byte - return chunk, contextual_read_length - - def _next(self) -> bytes: - if self.end_reached: - raise StopIteration() - chunk = None - contextual_read_length = self.read_length - if self.read_length == 0: - chunk, contextual_read_length = self._first_iteration() - if chunk is None: - chunk = self._next_chunk() - if self.end_byte is not None and self.read_length >= self.end_byte: - self.end_reached = True - return chunk[: self.end_byte - contextual_read_length] - return chunk - - def __next__(self) -> bytes: - chunk = self._next() - if chunk: - return chunk - self.end_reached = True - raise StopIteration() - - def close(self) -> None: - if hasattr(self.iterable, "close"): - self.iterable.close() # type: ignore - - -def _make_chunk_iter( - stream: t.Union[t.Iterable[bytes], t.IO[bytes]], - limit: t.Optional[int], - buffer_size: int, -) -> t.Iterator[bytes]: - """Helper for the line and 
chunk iter functions.""" - if isinstance(stream, (bytes, bytearray, str)): - raise TypeError( - "Passed a string or byte object instead of true iterator or stream." - ) - if not hasattr(stream, "read"): - for item in stream: - if item: - yield item - return - stream = t.cast(t.IO[bytes], stream) - if not isinstance(stream, LimitedStream) and limit is not None: - stream = t.cast(t.IO[bytes], LimitedStream(stream, limit)) - _read = stream.read - while True: - item = _read(buffer_size) - if not item: - break - yield item - - -def make_line_iter( - stream: t.Union[t.Iterable[bytes], t.IO[bytes]], - limit: t.Optional[int] = None, - buffer_size: int = 10 * 1024, - cap_at_buffer: bool = False, -) -> t.Iterator[bytes]: - """Safely iterates line-based over an input stream. If the input stream - is not a :class:`LimitedStream` the `limit` parameter is mandatory. - - This uses the stream's :meth:`~file.read` method internally as opposite - to the :meth:`~file.readline` method that is unsafe and can only be used - in violation of the WSGI specification. The same problem applies to the - `__iter__` function of the input stream which calls :meth:`~file.readline` - without arguments. - - If you need line-by-line processing it's strongly recommended to iterate - over the input stream using this helper function. - - .. versionchanged:: 0.8 - This function now ensures that the limit was reached. - - .. versionadded:: 0.9 - added support for iterators as input stream. - - .. versionadded:: 0.11.10 - added support for the `cap_at_buffer` parameter. - - :param stream: the stream or iterate to iterate over. - :param limit: the limit in bytes for the stream. (Usually - content length. Not necessary if the `stream` - is a :class:`LimitedStream`. - :param buffer_size: The optional buffer size. - :param cap_at_buffer: if this is set chunks are split if they are longer - than the buffer size. Internally this is implemented - that the buffer size might be exhausted by a factor - of two however. - """ - _iter = _make_chunk_iter(stream, limit, buffer_size) - - first_item = next(_iter, "") - if not first_item: - return - - s = _make_encode_wrapper(first_item) - empty = t.cast(bytes, s("")) - cr = t.cast(bytes, s("\r")) - lf = t.cast(bytes, s("\n")) - crlf = t.cast(bytes, s("\r\n")) - - _iter = t.cast(t.Iterator[bytes], chain((first_item,), _iter)) - - def _iter_basic_lines() -> t.Iterator[bytes]: - _join = empty.join - buffer: t.List[bytes] = [] - while True: - new_data = next(_iter, "") - if not new_data: - break - new_buf: t.List[bytes] = [] - buf_size = 0 - for item in t.cast( - t.Iterator[bytes], chain(buffer, new_data.splitlines(True)) - ): - new_buf.append(item) - buf_size += len(item) - if item and item[-1:] in crlf: - yield _join(new_buf) - new_buf = [] - elif cap_at_buffer and buf_size >= buffer_size: - rv = _join(new_buf) - while len(rv) >= buffer_size: - yield rv[:buffer_size] - rv = rv[buffer_size:] - new_buf = [rv] - buffer = new_buf - if buffer: - yield _join(buffer) - - # This hackery is necessary to merge 'foo\r' and '\n' into one item - # of 'foo\r\n' if we were unlucky and we hit a chunk boundary. 
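
A runnable sketch of make_line_iter() over an in-memory stream (the payload is made up; the limit is the byte length, which the docstring requires when the stream is not already a LimitedStream):

    import io
    from werkzeug.wsgi import make_line_iter

    data = b"first line\nsecond line\nno trailing newline"

    # Iterate line-wise without calling the unsafe readline() on the raw stream.
    lines = list(make_line_iter(io.BytesIO(data), limit=len(data)))
    assert lines == [b"first line\n", b"second line\n", b"no trailing newline"]
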
- previous = empty - for item in _iter_basic_lines(): - if item == lf and previous[-1:] == cr: - previous += item - item = empty - if previous: - yield previous - previous = item - if previous: - yield previous - - -def make_chunk_iter( - stream: t.Union[t.Iterable[bytes], t.IO[bytes]], - separator: bytes, - limit: t.Optional[int] = None, - buffer_size: int = 10 * 1024, - cap_at_buffer: bool = False, -) -> t.Iterator[bytes]: - """Works like :func:`make_line_iter` but accepts a separator - which divides chunks. If you want newline based processing - you should use :func:`make_line_iter` instead as it - supports arbitrary newline markers. - - .. versionadded:: 0.8 - - .. versionadded:: 0.9 - added support for iterators as input stream. - - .. versionadded:: 0.11.10 - added support for the `cap_at_buffer` parameter. - - :param stream: the stream or iterate to iterate over. - :param separator: the separator that divides chunks. - :param limit: the limit in bytes for the stream. (Usually - content length. Not necessary if the `stream` - is otherwise already limited). - :param buffer_size: The optional buffer size. - :param cap_at_buffer: if this is set chunks are split if they are longer - than the buffer size. Internally this is implemented - that the buffer size might be exhausted by a factor - of two however. - """ - _iter = _make_chunk_iter(stream, limit, buffer_size) - - first_item = next(_iter, b"") - if not first_item: - return - - _iter = t.cast(t.Iterator[bytes], chain((first_item,), _iter)) - if isinstance(first_item, str): - separator = _to_str(separator) - _split = re.compile(f"({re.escape(separator)})").split - _join = "".join - else: - separator = _to_bytes(separator) - _split = re.compile(b"(" + re.escape(separator) + b")").split - _join = b"".join - - buffer: t.List[bytes] = [] - while True: - new_data = next(_iter, b"") - if not new_data: - break - chunks = _split(new_data) - new_buf: t.List[bytes] = [] - buf_size = 0 - for item in chain(buffer, chunks): - if item == separator: - yield _join(new_buf) - new_buf = [] - buf_size = 0 - else: - buf_size += len(item) - new_buf.append(item) - - if cap_at_buffer and buf_size >= buffer_size: - rv = _join(new_buf) - while len(rv) >= buffer_size: - yield rv[:buffer_size] - rv = rv[buffer_size:] - new_buf = [rv] - buf_size = len(rv) - - buffer = new_buf - if buffer: - yield _join(buffer) - - -class LimitedStream(io.IOBase): - """Wraps a stream so that it doesn't read more than n bytes. If the - stream is exhausted and the caller tries to get more bytes from it - :func:`on_exhausted` is called which by default returns an empty - string. The return value of that function is forwarded - to the reader function. So if it returns an empty string - :meth:`read` will return an empty string as well. - - The limit however must never be higher than what the stream can - output. Otherwise :meth:`readlines` will try to read past the - limit. - - .. admonition:: Note on WSGI compliance - - calls to :meth:`readline` and :meth:`readlines` are not - WSGI compliant because it passes a size argument to the - readline methods. Unfortunately the WSGI PEP is not safely - implementable without a size argument to :meth:`readline` - because there is no EOF marker in the stream. As a result - of that the use of :meth:`readline` is discouraged. - - For the same reason iterating over the :class:`LimitedStream` - is not portable. It internally calls :meth:`readline`. 
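
And the separator-based variant, make_chunk_iter(), under the same assumptions (made-up payload, not part of this patch):

    import io
    from werkzeug.wsgi import make_chunk_iter

    data = b"alpha|beta|gamma"

    # Split the stream on an arbitrary separator instead of newlines.
    chunks = list(make_chunk_iter(io.BytesIO(data), separator=b"|", limit=len(data)))
    assert chunks == [b"alpha", b"beta", b"gamma"]
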
- - We strongly suggest using :meth:`read` only or using the - :func:`make_line_iter` which safely iterates line-based - over a WSGI input stream. - - :param stream: the stream to wrap. - :param limit: the limit for the stream, must not be longer than - what the string can provide if the stream does not - end with `EOF` (like `wsgi.input`) - """ - - def __init__(self, stream: t.IO[bytes], limit: int) -> None: - self._read = stream.read - self._readline = stream.readline - self._pos = 0 - self.limit = limit - - def __iter__(self) -> "LimitedStream": - return self - - @property - def is_exhausted(self) -> bool: - """If the stream is exhausted this attribute is `True`.""" - return self._pos >= self.limit - - def on_exhausted(self) -> bytes: - """This is called when the stream tries to read past the limit. - The return value of this function is returned from the reading - function. - """ - # Read null bytes from the stream so that we get the - # correct end of stream marker. - return self._read(0) - - def on_disconnect(self) -> bytes: - """What should happen if a disconnect is detected? The return - value of this function is returned from read functions in case - the client went away. By default a - :exc:`~werkzeug.exceptions.ClientDisconnected` exception is raised. - """ - from .exceptions import ClientDisconnected - - raise ClientDisconnected() - - def exhaust(self, chunk_size: int = 1024 * 64) -> None: - """Exhaust the stream. This consumes all the data left until the - limit is reached. - - :param chunk_size: the size for a chunk. It will read the chunk - until the stream is exhausted and throw away - the results. - """ - to_read = self.limit - self._pos - chunk = chunk_size - while to_read > 0: - chunk = min(to_read, chunk) - self.read(chunk) - to_read -= chunk - - def read(self, size: t.Optional[int] = None) -> bytes: - """Read `size` bytes or if size is not provided everything is read. - - :param size: the number of bytes read. - """ - if self._pos >= self.limit: - return self.on_exhausted() - if size is None or size == -1: # -1 is for consistence with file - size = self.limit - to_read = min(self.limit - self._pos, size) - try: - read = self._read(to_read) - except (OSError, ValueError): - return self.on_disconnect() - if to_read and len(read) != to_read: - return self.on_disconnect() - self._pos += len(read) - return read - - def readline(self, size: t.Optional[int] = None) -> bytes: - """Reads one line from the stream.""" - if self._pos >= self.limit: - return self.on_exhausted() - if size is None: - size = self.limit - self._pos - else: - size = min(size, self.limit - self._pos) - try: - line = self._readline(size) - except (ValueError, OSError): - return self.on_disconnect() - if size and not line: - return self.on_disconnect() - self._pos += len(line) - return line - - def readlines(self, size: t.Optional[int] = None) -> t.List[bytes]: - """Reads a file into a list of strings. It calls :meth:`readline` - until the file is read to the end. It does support the optional - `size` argument if the underlying stream supports it for - `readline`. - """ - last_pos = self._pos - result = [] - if size is not None: - end = min(self.limit, last_pos + size) - else: - end = self.limit - while True: - if size is not None: - size -= last_pos - self._pos - if self._pos >= end: - break - result.append(self.readline(size)) - if size is not None: - last_pos = self._pos - return result - - def tell(self) -> int: - """Returns the position of the stream. - - .. 
versionadded:: 0.9 - """ - return self._pos - - def __next__(self) -> bytes: - line = self.readline() - if not line: - raise StopIteration() - return line - - def readable(self) -> bool: - return True diff --git a/venv/pyvenv.cfg b/venv/pyvenv.cfg deleted file mode 100644 index 8a88747..0000000 --- a/venv/pyvenv.cfg +++ /dev/null @@ -1,3 +0,0 @@ -home = /Users/indraneel/opt/anaconda3/bin -include-system-site-packages = false -version = 3.8.5
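
To round off the werkzeug/wsgi.py removal above: a small sketch of LimitedStream, the wrapper the other helpers use to cap reads at the request's content length. The stream contents are made up, and the behaviour shown (read, tell, is_exhausted, readable) is the one documented in the deleted hunk.

    import io
    from werkzeug.wsgi import LimitedStream

    raw = io.BytesIO(b"0123456789-- trailing bytes the app must never see")
    limited = LimitedStream(raw, 10)    # e.g. limit taken from Content-Length

    assert limited.read(4) == b"0123"   # partial read advances the position
    assert limited.tell() == 4
    assert limited.read() == b"456789"  # read() stops exactly at the limit
    assert limited.is_exhausted
    assert limited.read() == b""        # on_exhausted() returns an empty string
    assert limited.readable()
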