diff --git a/Agent/start-daemon.cmd b/Agent/start-daemon.cmd
index d308709d..6ddca62e 100755
--- a/Agent/start-daemon.cmd
+++ b/Agent/start-daemon.cmd
@@ -1,4 +1,5 @@
-cd %~dp0
+chcp 65001
+cd /d "%~dp0"
 taskkill /im "orpa-agent.exe" /F /fi "username eq %username%"
 copy /Y ..\Resources\WPy64-3720\python-3.7.2.amd64\pythonw.exe ..\Resources\WPy64-3720\python-3.7.2.amd64\orpa-agent.exe
 .\..\Resources\WPy64-3720\python-3.7.2.amd64\orpa-agent.exe "config.py"
diff --git a/Agent/start.cmd b/Agent/start.cmd
index f3afdf35..16d3de8e 100755
--- a/Agent/start.cmd
+++ b/Agent/start.cmd
@@ -1,4 +1,5 @@
-cd %~dp0
+chcp 65001
+cd /d "%~dp0"
 taskkill /im "orpa-agent.exe" /F /fi "username eq %username%"
 copy /Y ..\Resources\WPy64-3720\python-3.7.2.amd64\python.exe ..\Resources\WPy64-3720\python-3.7.2.amd64\orpa-agent.exe
 .\..\Resources\WPy64-3720\python-3.7.2.amd64\orpa-agent.exe "config.py"
diff --git a/Orchestrator/start.cmd b/Orchestrator/start.cmd
index 2fba96d6..2ce50d70 100755
--- a/Orchestrator/start.cmd
+++ b/Orchestrator/start.cmd
@@ -1,4 +1,5 @@
-cd %~dp0
+chcp 65001
+cd /d "%~dp0"
 taskkill /im "orpa-orc.exe" /F /fi "username eq %username%"
 copy /Y ..\Resources\WPy64-3720\python-3.7.2.amd64\python.exe ..\Resources\WPy64-3720\python-3.7.2.amd64\orpa-orc.exe
 .\..\Resources\WPy64-3720\python-3.7.2.amd64\orpa-orc.exe "config.py"
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/LICENSE b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/LICENSE
new file mode 100644
index 00000000..104eebf5
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2018 Alex Grönholm
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
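The three launcher hunks above share one pattern: switch the console to the UTF-8 code page (chcp 65001, presumably so non-ASCII output such as Cyrillic user names survives intact), make the cd drive-safe and space-safe (cd /d "%~dp0"), kill any prior instance owned by the current user, refresh a renamed copy of the interpreter, and relaunch it against config.py. A rough Python rendering of Agent/start.cmd's flow, as an illustrative sketch only (the subprocess calls and the py_dir name are mine, not part of the patch):

```python
# Illustrative sketch of what Agent/start.cmd does; Windows-only, not part of this diff.
import os
import shutil
import subprocess
from pathlib import Path

py_dir = Path("..") / "Resources" / "WPy64-3720" / "python-3.7.2.amd64"

# taskkill /im "orpa-agent.exe" /F /fi "username eq %username%"
# Kill any previous agent instance owned by the current user.
subprocess.run(
    ["taskkill", "/im", "orpa-agent.exe", "/F",
     "/fi", f"username eq {os.environ.get('USERNAME', '')}"],
    check=False,  # taskkill exits nonzero harmlessly when no such process exists
)

# copy /Y ...\python.exe ...\orpa-agent.exe
# Copying the interpreter to a distinctive image name is what lets the
# taskkill filter above target only this product's processes.
shutil.copy2(py_dir / "python.exe", py_dir / "orpa-agent.exe")

# .\..\...\orpa-agent.exe "config.py" -- relaunch the agent with its config.
subprocess.run([str(py_dir / "orpa-agent.exe"), "config.py"], check=False)
```

The start-daemon.cmd variant is identical except that it copies pythonw.exe, so the daemon runs without a console window.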
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/METADATA new file mode 100644 index 00000000..600c1fe0 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/METADATA @@ -0,0 +1,102 @@ +Metadata-Version: 2.1 +Name: anyio +Version: 3.6.1 +Summary: High level compatibility layer for multiple asynchronous event loop implementations +Author: Alex Grönholm +Author-email: alex.gronholm@nextday.fi +License: MIT +Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ +Project-URL: Source code, https://github.com/agronholm/anyio +Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Framework :: AnyIO +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Python: >=3.6.2 +License-File: LICENSE +Requires-Dist: idna (>=2.8) +Requires-Dist: sniffio (>=1.1) +Requires-Dist: contextvars ; python_version < "3.7" +Requires-Dist: dataclasses ; python_version < "3.7" +Requires-Dist: typing-extensions ; python_version < "3.8" +Provides-Extra: doc +Requires-Dist: packaging ; extra == 'doc' +Requires-Dist: sphinx-rtd-theme ; extra == 'doc' +Requires-Dist: sphinx-autodoc-typehints (>=1.2.0) ; extra == 'doc' +Provides-Extra: test +Requires-Dist: coverage[toml] (>=4.5) ; extra == 'test' +Requires-Dist: hypothesis (>=4.0) ; extra == 'test' +Requires-Dist: pytest (>=7.0) ; extra == 'test' +Requires-Dist: pytest-mock (>=3.6.1) ; extra == 'test' +Requires-Dist: trustme ; extra == 'test' +Requires-Dist: contextlib2 ; (python_version < "3.7") and extra == 'test' +Requires-Dist: uvloop (<0.15) ; (python_version < "3.7" and (platform_python_implementation == "CPython" and platform_system != "Windows")) and extra == 'test' +Requires-Dist: mock (>=4) ; (python_version < "3.8") and extra == 'test' +Requires-Dist: uvloop (>=0.15) ; (python_version >= "3.7" and (platform_python_implementation == "CPython" and platform_system != "Windows")) and extra == 'test' +Provides-Extra: trio +Requires-Dist: trio (>=0.16) ; extra == 'trio' + +.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg + :target: https://github.com/agronholm/anyio/actions/workflows/test.yml + :alt: Build Status +.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master + :target: https://coveralls.io/github/agronholm/anyio?branch=master + :alt: Code Coverage +.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest + :target: https://anyio.readthedocs.io/en/latest/?badge=latest + :alt: Documentation +.. image:: https://badges.gitter.im/gitterHQ/gitter.svg + :target: https://gitter.im/python-trio/AnyIO + :alt: Gitter chat + +AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or +trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio, and works in harmony +with the native SC of trio itself. 
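The structured concurrency the paragraph above describes looks like this in practice; a minimal sketch against the public anyio API being vendored here (the worker/main names are illustrative, not from this diff):

```python
# Minimal structured-concurrency sketch using the public anyio API.
import anyio


async def worker(name: str, delay: float) -> None:
    await anyio.sleep(delay)  # cooperative sleep on whichever backend is running
    print(f"worker {name} finished")


async def main() -> None:
    # The task group (trio's "nursery") scopes its child tasks: the
    # `async with` block exits only once both workers have completed,
    # and an error in either one cancels the other.
    async with anyio.create_task_group() as tg:
        tg.start_soon(worker, "a", 0.1)
        tg.start_soon(worker, "b", 0.2)


anyio.run(main)  # asyncio backend by default; anyio.run(main, backend="trio") also works
```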
+ +Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or +trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full +refactoring necessary. It will blend in with native libraries of your chosen backend. + +Documentation +------------- + +View full documentation at: https://anyio.readthedocs.io/ + +Features +-------- + +AnyIO offers the following functionality: + +* Task groups (nurseries_ in trio terminology) +* High level networking (TCP, UDP and UNIX sockets) + + * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python + 3.8) + * async/await style UDP sockets (unlike asyncio where you still have to use Transports and + Protocols) + +* A versatile API for byte streams and object streams +* Inter-task synchronization and communication (locks, conditions, events, semaphores, object + streams) +* Worker threads +* Subprocesses +* Asynchronous file I/O (using worker threads) +* Signal handling + +AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. +It even works with the popular Hypothesis_ library. + +.. _asyncio: https://docs.python.org/3/library/asyncio.html +.. _trio: https://github.com/python-trio/trio +.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency +.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning +.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs +.. _pytest: https://docs.pytest.org/en/latest/ +.. _Hypothesis: https://hypothesis.works/ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/RECORD new file mode 100644 index 00000000..77625c27 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/RECORD @@ -0,0 +1,82 @@ +anyio-3.6.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +anyio-3.6.1.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 +anyio-3.6.1.dist-info/METADATA,sha256=cXu3CLppFqT_rBl4Eo2HOb0J1zm6Ltu_tMFuzjuQnew,4654 +anyio-3.6.1.dist-info/RECORD,, +anyio-3.6.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +anyio-3.6.1.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39 +anyio-3.6.1.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 +anyio/__init__.py,sha256=M2R8dk6L5gL5lXHArzpSfEn2oH5jMyUKhzyrkRiv2AM,4037 +anyio/__pycache__/__init__.cpython-37.pyc,, +anyio/__pycache__/from_thread.cpython-37.pyc,, +anyio/__pycache__/lowlevel.cpython-37.pyc,, +anyio/__pycache__/pytest_plugin.cpython-37.pyc,, +anyio/__pycache__/to_process.cpython-37.pyc,, +anyio/__pycache__/to_thread.cpython-37.pyc,, +anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_backends/__pycache__/__init__.cpython-37.pyc,, +anyio/_backends/__pycache__/_asyncio.cpython-37.pyc,, +anyio/_backends/__pycache__/_trio.cpython-37.pyc,, +anyio/_backends/_asyncio.py,sha256=ZJDvRwfS4wv9WWcqWledNJyl8hx8A8-m9-gSKAJ6nBM,69238 +anyio/_backends/_trio.py,sha256=CebCaqr8Szi6uCnUzwtBRLfUitR5OnDT_wfH-KiqvBQ,29696 +anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_core/__pycache__/__init__.cpython-37.pyc,, +anyio/_core/__pycache__/_compat.cpython-37.pyc,, +anyio/_core/__pycache__/_eventloop.cpython-37.pyc,, 
+anyio/_core/__pycache__/_exceptions.cpython-37.pyc,, +anyio/_core/__pycache__/_fileio.cpython-37.pyc,, +anyio/_core/__pycache__/_resources.cpython-37.pyc,, +anyio/_core/__pycache__/_signals.cpython-37.pyc,, +anyio/_core/__pycache__/_sockets.cpython-37.pyc,, +anyio/_core/__pycache__/_streams.cpython-37.pyc,, +anyio/_core/__pycache__/_subprocesses.cpython-37.pyc,, +anyio/_core/__pycache__/_synchronization.cpython-37.pyc,, +anyio/_core/__pycache__/_tasks.cpython-37.pyc,, +anyio/_core/__pycache__/_testing.cpython-37.pyc,, +anyio/_core/__pycache__/_typedattr.cpython-37.pyc,, +anyio/_core/_compat.py,sha256=X99W70r-O-JLdkKNtbddcIY5H2Nyg3Nk34oUYE9WZRs,5790 +anyio/_core/_eventloop.py,sha256=DRn_hy679LtsJFsPX7dXjDv72bLtSFkTnWY9WVVfgCQ,4108 +anyio/_core/_exceptions.py,sha256=1wqraNldZroYkoyB0HZStAruz_7yDCBaW-4zYwsKj8s,2904 +anyio/_core/_fileio.py,sha256=au82uZXZX4fia8EoZq_E-JDwZFKe6ZtI0J6IkxK8FmQ,18298 +anyio/_core/_resources.py,sha256=M_uN-90N8eSsWuvo-0xluWU_OG2BTyccAgsQ7XtHxzs,399 +anyio/_core/_signals.py,sha256=D4btJN527tAADspKBeNKaCds-ZcEZJP8LWM_MjVuQRA,827 +anyio/_core/_sockets.py,sha256=fW_Cbg6kfw4xgYuVuWbcWrAYspOcDSEjwxVATMzf2fo,19820 +anyio/_core/_streams.py,sha256=gjT5xChJ1OoV8nNinljSv1yW4nqUS-QzZzIydQz3exQ,1494 +anyio/_core/_subprocesses.py,sha256=pcchMI2OII0QSjiVxRiTEz4M0B7TlQPzGurfCuka-xc,5049 +anyio/_core/_synchronization.py,sha256=xOOG4hF9783N6E2IcD3YKiukguA5bPrj6BodDsKNaJY,16822 +anyio/_core/_tasks.py,sha256=ebGLjHvwL6I9aGyPwvCig1drebSVYFzvY3pnN3TsB4o,5273 +anyio/_core/_testing.py,sha256=VZka_yebIhJ6mJ6Vo_ilO3Nbz53ieqg0WBijwciMwdY,2196 +anyio/_core/_typedattr.py,sha256=k5-wBvMlDlKHIpn18INVnXAlGwI3CrAvPmWoceHjnOQ,2534 +anyio/abc/__init__.py,sha256=hMa47CMs5O1twC2bBcSbzwX-3Q08BAgAPTRekQobb3E,2123 +anyio/abc/__pycache__/__init__.cpython-37.pyc,, +anyio/abc/__pycache__/_resources.cpython-37.pyc,, +anyio/abc/__pycache__/_sockets.cpython-37.pyc,, +anyio/abc/__pycache__/_streams.cpython-37.pyc,, +anyio/abc/__pycache__/_subprocesses.cpython-37.pyc,, +anyio/abc/__pycache__/_tasks.cpython-37.pyc,, +anyio/abc/__pycache__/_testing.cpython-37.pyc,, +anyio/abc/_resources.py,sha256=js737mWPG6IW0fH8W4Tz9eNWLztse7dKxEC61z934Vk,752 +anyio/abc/_sockets.py,sha256=i1VdcJTLAuRlYeZoL6s5RBSWbX62Cu6ln5YZBL2YrWk,5754 +anyio/abc/_streams.py,sha256=0g70fhKAzbnK0KKmWwRgwmKdApBwduAcVj4TpjSzjzU,6501 +anyio/abc/_subprocesses.py,sha256=iREP_YQ91it88lDU4XIcI3HZ9HUvV5UmjQk_sSPonrw,2071 +anyio/abc/_tasks.py,sha256=mQQd1DANqpySKyehVVPdMfi_UEG49zZUJpt5blunOjg,3119 +anyio/abc/_testing.py,sha256=ifKCUPzcQdHAEGO-weu2GQvzjMQPPIWO24mQ0z6zkdU,1928 +anyio/from_thread.py,sha256=nSq6mafYMqwxKmzdJyISg8cp-AyBj9rxZPMt_b7klSM,16497 +anyio/lowlevel.py,sha256=W4ydshns7f86YuSESFc2igTf46AWMXnGPQGsY_Esl2E,4679 +anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/pytest_plugin.py,sha256=kWj2B8BJehePJd1sztRBmJBRh8O4hk1oGSYQRlX5Gr8,5134 +anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/streams/__pycache__/__init__.cpython-37.pyc,, +anyio/streams/__pycache__/buffered.cpython-37.pyc,, +anyio/streams/__pycache__/file.cpython-37.pyc,, +anyio/streams/__pycache__/memory.cpython-37.pyc,, +anyio/streams/__pycache__/stapled.cpython-37.pyc,, +anyio/streams/__pycache__/text.cpython-37.pyc,, +anyio/streams/__pycache__/tls.cpython-37.pyc,, +anyio/streams/buffered.py,sha256=FegOSO4Xcxa5SaDfU1A3ZkTTxaPrv6G435Y_giZ8k44,4437 +anyio/streams/file.py,sha256=pujJ-m6BX-gOLnVoZwkE5kh-YDs5Vx9eJFVkvliQ0S4,4353 +anyio/streams/memory.py,sha256=3RGeZoevoGIgBWfD2_X1cqxIPOz-BqQkRf6lUcOnBYc,9209 
+anyio/streams/stapled.py,sha256=0E0V15v8M5GVelpHe5RT0S33tQ9hGe4ZCXo_KJEjtt4,4258 +anyio/streams/text.py,sha256=WRFyjsRpBjQKdCmR4ZuzYTEAJqGx2s5oTJmGI1C6Ng0,5014 +anyio/streams/tls.py,sha256=-WXGsMV14XHXAxc38WpBvGusjuY7e449g4UCEHIlnWw,12040 +anyio/to_process.py,sha256=hu0ES3HJC-VEjcdPJMzAzjyTaekaCNToO3coj3jvnus,9247 +anyio/to_thread.py,sha256=VeMQoo8Va2zz0WFk2p123QikDpqk2wYZGw20COC3wqw,2124 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/WHEEL b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/entry_points.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/entry_points.txt new file mode 100644 index 00000000..44dd9bdc --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[pytest11] +anyio = anyio.pytest_plugin diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/top_level.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/top_level.txt new file mode 100644 index 00000000..c77c069e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio-3.6.1.dist-info/top_level.txt @@ -0,0 +1 @@ +anyio diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/__init__.py new file mode 100644 index 00000000..6e81178c --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/__init__.py @@ -0,0 +1,167 @@ +__all__ = ( + "maybe_async", + "maybe_async_cm", + "run", + "sleep", + "sleep_forever", + "sleep_until", + "current_time", + "get_all_backends", + "get_cancelled_exc_class", + "BrokenResourceError", + "BrokenWorkerProcess", + "BusyResourceError", + "ClosedResourceError", + "DelimiterNotFound", + "EndOfStream", + "ExceptionGroup", + "IncompleteRead", + "TypedAttributeLookupError", + "WouldBlock", + "AsyncFile", + "Path", + "open_file", + "wrap_file", + "aclose_forcefully", + "open_signal_receiver", + "connect_tcp", + "connect_unix", + "create_tcp_listener", + "create_unix_listener", + "create_udp_socket", + "create_connected_udp_socket", + "getaddrinfo", + "getnameinfo", + "wait_socket_readable", + "wait_socket_writable", + "create_memory_object_stream", + "run_process", + "open_process", + "create_lock", + "CapacityLimiter", + "CapacityLimiterStatistics", + "Condition", + "ConditionStatistics", + "Event", + "EventStatistics", + "Lock", + "LockStatistics", + "Semaphore", + "SemaphoreStatistics", + "create_condition", + "create_event", + "create_semaphore", + "create_capacity_limiter", + "open_cancel_scope", + "fail_after", + "move_on_after", + "current_effective_deadline", + "TASK_STATUS_IGNORED", + "CancelScope", + "create_task_group", + "TaskInfo", + "get_current_task", + "get_running_tasks", + "wait_all_tasks_blocked", + "run_sync_in_worker_thread", + "run_async_from_thread", + "run_sync_from_thread", + "current_default_worker_thread_limiter", + "create_blocking_portal", + "start_blocking_portal", + "typed_attribute", + "TypedAttributeSet", 
+ "TypedAttributeProvider", +) + +from typing import Any + +from ._core._compat import maybe_async, maybe_async_cm +from ._core._eventloop import ( + current_time, + get_all_backends, + get_cancelled_exc_class, + run, + sleep, + sleep_forever, + sleep_until, +) +from ._core._exceptions import ( + BrokenResourceError, + BrokenWorkerProcess, + BusyResourceError, + ClosedResourceError, + DelimiterNotFound, + EndOfStream, + ExceptionGroup, + IncompleteRead, + TypedAttributeLookupError, + WouldBlock, +) +from ._core._fileio import AsyncFile, Path, open_file, wrap_file +from ._core._resources import aclose_forcefully +from ._core._signals import open_signal_receiver +from ._core._sockets import ( + connect_tcp, + connect_unix, + create_connected_udp_socket, + create_tcp_listener, + create_udp_socket, + create_unix_listener, + getaddrinfo, + getnameinfo, + wait_socket_readable, + wait_socket_writable, +) +from ._core._streams import create_memory_object_stream +from ._core._subprocesses import open_process, run_process +from ._core._synchronization import ( + CapacityLimiter, + CapacityLimiterStatistics, + Condition, + ConditionStatistics, + Event, + EventStatistics, + Lock, + LockStatistics, + Semaphore, + SemaphoreStatistics, + create_capacity_limiter, + create_condition, + create_event, + create_lock, + create_semaphore, +) +from ._core._tasks import ( + TASK_STATUS_IGNORED, + CancelScope, + create_task_group, + current_effective_deadline, + fail_after, + move_on_after, + open_cancel_scope, +) +from ._core._testing import ( + TaskInfo, + get_current_task, + get_running_tasks, + wait_all_tasks_blocked, +) +from ._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute + +# Re-exported here, for backwards compatibility +# isort: off +from .to_thread import current_default_worker_thread_limiter, run_sync_in_worker_thread +from .from_thread import ( + create_blocking_portal, + run_async_from_thread, + run_sync_from_thread, + start_blocking_portal, +) + +# Re-export imports so they look like they live directly in this package +key: str +value: Any +for key, value in list(locals().items()): + if getattr(value, "__module__", "").startswith("anyio."): + value.__module__ = __name__ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_backends/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_backends/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_backends/_asyncio.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_backends/_asyncio.py new file mode 100644 index 00000000..d2bbc94c --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_backends/_asyncio.py @@ -0,0 +1,2181 @@ +import array +import asyncio +import concurrent.futures +import math +import socket +import sys +from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined] +from collections import OrderedDict, deque +from concurrent.futures import Future +from contextvars import Context, copy_context +from dataclasses import dataclass +from functools import partial, wraps +from inspect import ( + CORO_RUNNING, + CORO_SUSPENDED, + GEN_RUNNING, + GEN_SUSPENDED, + getcoroutinestate, + getgeneratorstate, +) +from io import IOBase +from os import PathLike +from queue import Queue +from socket import AddressFamily, SocketKind +from threading import Thread +from types import TracebackType +from typing import ( + IO, 
+ Any, + AsyncGenerator, + Awaitable, + Callable, + Collection, + Coroutine, + Deque, + Dict, + Generator, + Iterable, + List, + Mapping, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) +from weakref import WeakKeyDictionary + +import sniffio + +from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc +from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable +from .._core._eventloop import claim_worker_thread, threadlocals +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, +) +from .._core._exceptions import ExceptionGroup as BaseExceptionGroup +from .._core._exceptions import WouldBlock +from .._core._sockets import GetAddrInfoReturnType, convert_ipv6_sockaddr +from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import ResourceGuard +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType +from ..lowlevel import RunVar + +if sys.version_info >= (3, 8): + + def get_coro(task: asyncio.Task) -> Union[Generator, Awaitable[Any]]: + return task.get_coro() + +else: + + def get_coro(task: asyncio.Task) -> Union[Generator, Awaitable[Any]]: + return task._coro + + +if sys.version_info >= (3, 7): + from asyncio import all_tasks, create_task, current_task, get_running_loop + from asyncio import run as native_run + + def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]: + return [cb for cb, context in task._callbacks] # type: ignore[attr-defined] + +else: + _T = TypeVar("_T") + + def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]: + return task._callbacks + + def native_run(main, *, debug=False): + # Snatched from Python 3.7 + from asyncio import coroutines, events, tasks + + def _cancel_all_tasks(loop): + to_cancel = all_tasks(loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete( + tasks.gather(*to_cancel, loop=loop, return_exceptions=True) + ) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": task.exception(), + "task": task, + } + ) + + if events._get_running_loop() is not None: + raise RuntimeError( + "asyncio.run() cannot be called from a running event loop" + ) + + if not coroutines.iscoroutine(main): + raise ValueError(f"a coroutine was expected, got {main!r}") + + loop = events.new_event_loop() + try: + events.set_event_loop(loop) + loop.set_debug(debug) + return loop.run_until_complete(main) + finally: + try: + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + finally: + events.set_event_loop(None) + loop.close() + + def create_task( + coro: Union[Generator[Any, None, _T], Awaitable[_T]], *, name: object = None + ) -> asyncio.Task: + return get_running_loop().create_task(coro) + + def get_running_loop() -> asyncio.AbstractEventLoop: + loop = asyncio._get_running_loop() + if loop is not None: + return loop + else: + raise RuntimeError("no running event loop") + + def all_tasks( + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> Set[asyncio.Task]: + """Return a set of all tasks for the loop.""" + from asyncio import Task + + if loop is None: + loop = get_running_loop() + + return {t for t in Task.all_tasks(loop) if not 
t.done()} + + def current_task( + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> Optional[asyncio.Task]: + if loop is None: + loop = get_running_loop() + + return asyncio.Task.current_task(loop) + + +T_Retval = TypeVar("T_Retval") + +# Check whether there is native support for task names in asyncio (3.8+) +_native_task_names = hasattr(asyncio.Task, "get_name") + + +_root_task: RunVar[Optional[asyncio.Task]] = RunVar("_root_task") + + +def find_root_task() -> asyncio.Task: + root_task = _root_task.get(None) + if root_task is not None and not root_task.done(): + return root_task + + # Look for a task that has been started via run_until_complete() + for task in all_tasks(): + if task._callbacks and not task.done(): + for cb in _get_task_callbacks(task): + if ( + cb is _run_until_complete_cb + or getattr(cb, "__module__", None) == "uvloop.loop" + ): + _root_task.set(task) + return task + + # Look up the topmost task in the AnyIO task tree, if possible + task = cast(asyncio.Task, current_task()) + state = _task_states.get(task) + if state: + cancel_scope = state.cancel_scope + while cancel_scope and cancel_scope._parent_scope is not None: + cancel_scope = cancel_scope._parent_scope + + if cancel_scope is not None: + return cast(asyncio.Task, cancel_scope._host_task) + + return task + + +def get_callable_name(func: Callable) -> str: + module = getattr(func, "__module__", None) + qualname = getattr(func, "__qualname__", None) + return ".".join([x for x in (module, qualname) if x]) + + +# +# Event loop +# + +_run_vars = ( + WeakKeyDictionary() +) # type: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] + +current_token = get_running_loop + + +def _task_started(task: asyncio.Task) -> bool: + """Return ``True`` if the task has been started and has not finished.""" + coro = cast(Coroutine[Any, Any, Any], get_coro(task)) + try: + return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED) + except AttributeError: + try: + return getgeneratorstate(cast(Generator, coro)) in ( + GEN_RUNNING, + GEN_SUSPENDED, + ) + except AttributeError: + # task coro is async_genenerator_asend https://bugs.python.org/issue37771 + raise Exception(f"Cannot determine if task {task} has started or not") + + +def _maybe_set_event_loop_policy( + policy: Optional[asyncio.AbstractEventLoopPolicy], use_uvloop: bool +) -> None: + # On CPython, use uvloop when possible if no other policy has been given and if not + # explicitly disabled + if policy is None and use_uvloop and sys.implementation.name == "cpython": + try: + import uvloop + except ImportError: + pass + else: + # Test for missing shutdown_default_executor() (uvloop 0.14.0 and earlier) + if not hasattr( + asyncio.AbstractEventLoop, "shutdown_default_executor" + ) or hasattr(uvloop.loop.Loop, "shutdown_default_executor"): + policy = uvloop.EventLoopPolicy() + + if policy is not None: + asyncio.set_event_loop_policy(policy) + + +def run( + func: Callable[..., Awaitable[T_Retval]], + *args: object, + debug: bool = False, + use_uvloop: bool = False, + policy: Optional[asyncio.AbstractEventLoopPolicy] = None, +) -> T_Retval: + @wraps(func) + async def wrapper() -> T_Retval: + task = cast(asyncio.Task, current_task()) + task_state = TaskState(None, get_callable_name(func), None) + _task_states[task] = task_state + if _native_task_names: + task.set_name(task_state.name) + + try: + return await func(*args) + finally: + del _task_states[task] + + _maybe_set_event_loop_policy(policy, use_uvloop) + return native_run(wrapper(), debug=debug) + + +# +# Miscellaneous +# 
+ +sleep = asyncio.sleep + + +# +# Timeouts and cancellation +# + +CancelledError = asyncio.CancelledError + + +class CancelScope(BaseCancelScope): + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> "CancelScope": + return object.__new__(cls) + + def __init__(self, deadline: float = math.inf, shield: bool = False): + self._deadline = deadline + self._shield = shield + self._parent_scope: Optional[CancelScope] = None + self._cancel_called = False + self._active = False + self._timeout_handle: Optional[asyncio.TimerHandle] = None + self._cancel_handle: Optional[asyncio.Handle] = None + self._tasks: Set[asyncio.Task] = set() + self._host_task: Optional[asyncio.Task] = None + self._timeout_expired = False + + def __enter__(self) -> "CancelScope": + if self._active: + raise RuntimeError( + "Each CancelScope may only be used for a single 'with' block" + ) + + self._host_task = host_task = cast(asyncio.Task, current_task()) + self._tasks.add(host_task) + try: + task_state = _task_states[host_task] + except KeyError: + task_name = host_task.get_name() if _native_task_names else None + task_state = TaskState(None, task_name, self) + _task_states[host_task] = task_state + else: + self._parent_scope = task_state.cancel_scope + task_state.cancel_scope = self + + self._timeout() + self._active = True + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + if not self._active: + raise RuntimeError("This cancel scope is not active") + if current_task() is not self._host_task: + raise RuntimeError( + "Attempted to exit cancel scope in a different task than it was " + "entered in" + ) + + assert self._host_task is not None + host_task_state = _task_states.get(self._host_task) + if host_task_state is None or host_task_state.cancel_scope is not self: + raise RuntimeError( + "Attempted to exit a cancel scope that isn't the current tasks's " + "current cancel scope" + ) + + self._active = False + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._tasks.remove(self._host_task) + + host_task_state.cancel_scope = self._parent_scope + + # Restart the cancellation effort in the farthest directly cancelled parent scope if this + # one was shielded + if self._shield: + self._deliver_cancellation_to_parent() + + if exc_val is not None: + exceptions = ( + exc_val.exceptions if isinstance(exc_val, ExceptionGroup) else [exc_val] + ) + if all(isinstance(exc, CancelledError) for exc in exceptions): + if self._timeout_expired: + return True + elif not self._cancel_called: + # Task was cancelled natively + return None + elif not self._parent_cancelled(): + # This scope was directly cancelled + return True + + return None + + def _timeout(self) -> None: + if self._deadline != math.inf: + loop = get_running_loop() + if loop.time() >= self._deadline: + self._timeout_expired = True + self.cancel() + else: + self._timeout_handle = loop.call_at(self._deadline, self._timeout) + + def _deliver_cancellation(self) -> None: + """ + Deliver cancellation to directly contained tasks and nested cancel scopes. + + Schedule another run at the end if we still have tasks eligible for cancellation. 
+ """ + should_retry = False + current = current_task() + for task in self._tasks: + if task._must_cancel: # type: ignore[attr-defined] + continue + + # The task is eligible for cancellation if it has started and is not in a cancel + # scope shielded from this one + cancel_scope = _task_states[task].cancel_scope + while cancel_scope is not self: + if cancel_scope is None or cancel_scope._shield: + break + else: + cancel_scope = cancel_scope._parent_scope + else: + should_retry = True + if task is not current and ( + task is self._host_task or _task_started(task) + ): + task.cancel() + + # Schedule another callback if there are still tasks left + if should_retry: + self._cancel_handle = get_running_loop().call_soon( + self._deliver_cancellation + ) + else: + self._cancel_handle = None + + def _deliver_cancellation_to_parent(self) -> None: + """Start cancellation effort in the farthest directly cancelled parent scope""" + scope = self._parent_scope + scope_to_cancel: Optional[CancelScope] = None + while scope is not None: + if scope._cancel_called and scope._cancel_handle is None: + scope_to_cancel = scope + + # No point in looking beyond any shielded scope + if scope._shield: + break + + scope = scope._parent_scope + + if scope_to_cancel is not None: + scope_to_cancel._deliver_cancellation() + + def _parent_cancelled(self) -> bool: + # Check whether any parent has been cancelled + cancel_scope = self._parent_scope + while cancel_scope is not None and not cancel_scope._shield: + if cancel_scope._cancel_called: + return True + else: + cancel_scope = cancel_scope._parent_scope + + return False + + def cancel(self) -> DeprecatedAwaitable: + if not self._cancel_called: + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._cancel_called = True + self._deliver_cancellation() + + return DeprecatedAwaitable(self.cancel) + + @property + def deadline(self) -> float: + return self._deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self._deadline = float(value) + if self._timeout_handle is not None: + self._timeout_handle.cancel() + self._timeout_handle = None + + if self._active and not self._cancel_called: + self._timeout() + + @property + def cancel_called(self) -> bool: + return self._cancel_called + + @property + def shield(self) -> bool: + return self._shield + + @shield.setter + def shield(self, value: bool) -> None: + if self._shield != value: + self._shield = value + if not value: + self._deliver_cancellation_to_parent() + + +async def checkpoint() -> None: + await sleep(0) + + +async def checkpoint_if_cancelled() -> None: + task = current_task() + if task is None: + return + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return + + while cancel_scope: + if cancel_scope.cancel_called: + await sleep(0) + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + +async def cancel_shielded_checkpoint() -> None: + with CancelScope(shield=True): + await sleep(0) + + +def current_effective_deadline() -> float: + try: + cancel_scope = _task_states[current_task()].cancel_scope # type: ignore[index] + except KeyError: + return math.inf + + deadline = math.inf + while cancel_scope: + deadline = min(deadline, cancel_scope.deadline) + if cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + return deadline + + +def current_time() -> float: + return get_running_loop().time() + + +# +# Task states +# + + +class TaskState: + """ + Encapsulates 
auxiliary task information that cannot be added to the Task instance itself + because there are no guarantees about its implementation. + """ + + __slots__ = "parent_id", "name", "cancel_scope" + + def __init__( + self, + parent_id: Optional[int], + name: Optional[str], + cancel_scope: Optional[CancelScope], + ): + self.parent_id = parent_id + self.name = name + self.cancel_scope = cancel_scope + + +_task_states = WeakKeyDictionary() # type: WeakKeyDictionary[asyncio.Task, TaskState] + + +# +# Task groups +# + + +class ExceptionGroup(BaseExceptionGroup): + def __init__(self, exceptions: List[BaseException]): + super().__init__() + self.exceptions = exceptions + + +class _AsyncioTaskStatus(abc.TaskStatus): + def __init__(self, future: asyncio.Future, parent_id: int): + self._future = future + self._parent_id = parent_id + + def started(self, value: object = None) -> None: + try: + self._future.set_result(value) + except asyncio.InvalidStateError: + raise RuntimeError( + "called 'started' twice on the same task status" + ) from None + + task = cast(asyncio.Task, current_task()) + _task_states[task].parent_id = self._parent_id + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self.cancel_scope: CancelScope = CancelScope() + self._active = False + self._exceptions: List[BaseException] = [] + + async def __aenter__(self) -> "TaskGroup": + self.cancel_scope.__enter__() + self._active = True + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + ignore_exception = self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) + if exc_val is not None: + self.cancel_scope.cancel() + self._exceptions.append(exc_val) + + while self.cancel_scope._tasks: + try: + await asyncio.wait(self.cancel_scope._tasks) + except asyncio.CancelledError: + self.cancel_scope.cancel() + + self._active = False + if not self.cancel_scope._parent_cancelled(): + exceptions = self._filter_cancellation_errors(self._exceptions) + else: + exceptions = self._exceptions + + try: + if len(exceptions) > 1: + if all( + isinstance(e, CancelledError) and not e.args for e in exceptions + ): + # Tasks were cancelled natively, without a cancellation message + raise CancelledError + else: + raise ExceptionGroup(exceptions) + elif exceptions and exceptions[0] is not exc_val: + raise exceptions[0] + except BaseException as exc: + # Clear the context here, as it can only be done in-flight. + # If the context is not cleared, it can result in recursive tracebacks (see #145). 
+ exc.__context__ = None + raise + + return ignore_exception + + @staticmethod + def _filter_cancellation_errors( + exceptions: Sequence[BaseException], + ) -> List[BaseException]: + filtered_exceptions: List[BaseException] = [] + for exc in exceptions: + if isinstance(exc, ExceptionGroup): + new_exceptions = TaskGroup._filter_cancellation_errors(exc.exceptions) + if len(new_exceptions) > 1: + filtered_exceptions.append(exc) + elif len(new_exceptions) == 1: + filtered_exceptions.append(new_exceptions[0]) + elif new_exceptions: + new_exc = ExceptionGroup(new_exceptions) + new_exc.__cause__ = exc.__cause__ + new_exc.__context__ = exc.__context__ + new_exc.__traceback__ = exc.__traceback__ + filtered_exceptions.append(new_exc) + elif not isinstance(exc, CancelledError) or exc.args: + filtered_exceptions.append(exc) + + return filtered_exceptions + + async def _run_wrapped_task( + self, coro: Coroutine, task_status_future: Optional[asyncio.Future] + ) -> None: + # This is the code path for Python 3.6 and 3.7 on which asyncio freaks out if a task raises + # a BaseException. + __traceback_hide__ = __tracebackhide__ = True # noqa: F841 + task = cast(asyncio.Task, current_task()) + try: + await coro + except BaseException as exc: + if task_status_future is None or task_status_future.done(): + self._exceptions.append(exc) + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + else: + if task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError("Child exited without calling task_status.started()") + ) + finally: + if task in self.cancel_scope._tasks: + self.cancel_scope._tasks.remove(task) + del _task_states[task] + + def _spawn( + self, + func: Callable[..., Coroutine], + args: tuple, + name: object, + task_status_future: Optional[asyncio.Future] = None, + ) -> asyncio.Task: + def task_done(_task: asyncio.Task) -> None: + # This is the code path for Python 3.8+ + assert _task in self.cancel_scope._tasks + self.cancel_scope._tasks.remove(_task) + del _task_states[_task] + + try: + exc = _task.exception() + except CancelledError as e: + while isinstance(e.__context__, CancelledError): + e = e.__context__ + + exc = e + + if exc is not None: + if task_status_future is None or task_status_future.done(): + self._exceptions.append(exc) + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + elif task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError("Child exited without calling task_status.started()") + ) + + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." 
+ ) + + options = {} + name = get_callable_name(func) if name is None else str(name) + if _native_task_names: + options["name"] = name + + kwargs = {} + if task_status_future: + parent_id = id(current_task()) + kwargs["task_status"] = _AsyncioTaskStatus( + task_status_future, id(self.cancel_scope._host_task) + ) + else: + parent_id = id(self.cancel_scope._host_task) + + coro = func(*args, **kwargs) + if not asyncio.iscoroutine(coro): + raise TypeError( + f"Expected an async function, but {func} appears to be synchronous" + ) + + foreign_coro = not hasattr(coro, "cr_frame") and not hasattr(coro, "gi_frame") + if foreign_coro or sys.version_info < (3, 8): + coro = self._run_wrapped_task(coro, task_status_future) + + task = create_task(coro, **options) + if not foreign_coro and sys.version_info >= (3, 8): + task.add_done_callback(task_done) + + # Make the spawned task inherit the task group's cancel scope + _task_states[task] = TaskState( + parent_id=parent_id, name=name, cancel_scope=self.cancel_scope + ) + self.cancel_scope._tasks.add(task) + return task + + def start_soon( + self, func: Callable[..., Coroutine], *args: object, name: object = None + ) -> None: + self._spawn(func, args, name) + + async def start( + self, func: Callable[..., Coroutine], *args: object, name: object = None + ) -> None: + future: asyncio.Future = asyncio.Future() + task = self._spawn(func, args, name, future) + + # If the task raises an exception after sending a start value without a switch point + # between, the task group is cancelled and this method never proceeds to process the + # completed future. That's why we have to have a shielded cancel scope here. + with CancelScope(shield=True): + try: + return await future + except CancelledError: + task.cancel() + raise + + +# +# Threads +# + +_Retval_Queue_Type = Tuple[Optional[T_Retval], Optional[BaseException]] + + +class WorkerThread(Thread): + MAX_IDLE_TIME = 10 # seconds + + def __init__( + self, + root_task: asyncio.Task, + workers: Set["WorkerThread"], + idle_workers: Deque["WorkerThread"], + ): + super().__init__(name="AnyIO worker thread") + self.root_task = root_task + self.workers = workers + self.idle_workers = idle_workers + self.loop = root_task._loop + self.queue: Queue[ + Union[Tuple[Context, Callable, tuple, asyncio.Future], None] + ] = Queue(2) + self.idle_since = current_time() + self.stopping = False + + def _report_result( + self, future: asyncio.Future, result: Any, exc: Optional[BaseException] + ) -> None: + self.idle_since = current_time() + if not self.stopping: + self.idle_workers.append(self) + + if not future.cancelled(): + if exc is not None: + future.set_exception(exc) + else: + future.set_result(result) + + def run(self) -> None: + with claim_worker_thread("asyncio"): + threadlocals.loop = self.loop + while True: + item = self.queue.get() + if item is None: + # Shutdown command received + return + + context, func, args, future = item + if not future.cancelled(): + result = None + exception: Optional[BaseException] = None + try: + result = context.run(func, *args) + except BaseException as exc: + exception = exc + + if not self.loop.is_closed(): + self.loop.call_soon_threadsafe( + self._report_result, future, result, exception + ) + + self.queue.task_done() + + def stop(self, f: Optional[asyncio.Task] = None) -> None: + self.stopping = True + self.queue.put_nowait(None) + self.workers.discard(self) + try: + self.idle_workers.remove(self) + except ValueError: + pass + + +_threadpool_idle_workers: RunVar[Deque[WorkerThread]] = RunVar( 
+ "_threadpool_idle_workers" +) +_threadpool_workers: RunVar[Set[WorkerThread]] = RunVar("_threadpool_workers") + + +async def run_sync_in_worker_thread( + func: Callable[..., T_Retval], + *args: object, + cancellable: bool = False, + limiter: Optional["CapacityLimiter"] = None, +) -> T_Retval: + await checkpoint() + + # If this is the first run in this event loop thread, set up the necessary variables + try: + idle_workers = _threadpool_idle_workers.get() + workers = _threadpool_workers.get() + except LookupError: + idle_workers = deque() + workers = set() + _threadpool_idle_workers.set(idle_workers) + _threadpool_workers.set(workers) + + async with (limiter or current_default_thread_limiter()): + with CancelScope(shield=not cancellable): + future: asyncio.Future = asyncio.Future() + root_task = find_root_task() + if not idle_workers: + worker = WorkerThread(root_task, workers, idle_workers) + worker.start() + workers.add(worker) + root_task.add_done_callback(worker.stop) + else: + worker = idle_workers.pop() + + # Prune any other workers that have been idle for MAX_IDLE_TIME seconds or longer + now = current_time() + while idle_workers: + if now - idle_workers[0].idle_since < WorkerThread.MAX_IDLE_TIME: + break + + expired_worker = idle_workers.popleft() + expired_worker.root_task.remove_done_callback(expired_worker.stop) + expired_worker.stop() + + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, None) + worker.queue.put_nowait((context, func, args, future)) + return await future + + +def run_sync_from_thread( + func: Callable[..., T_Retval], + *args: object, + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> T_Retval: + @wraps(func) + def wrapper() -> None: + try: + f.set_result(func(*args)) + except BaseException as exc: + f.set_exception(exc) + if not isinstance(exc, Exception): + raise + + f: concurrent.futures.Future[T_Retval] = Future() + loop = loop or threadlocals.loop + if sys.version_info < (3, 7): + loop.call_soon_threadsafe(copy_context().run, wrapper) + else: + loop.call_soon_threadsafe(wrapper) + + return f.result() + + +def run_async_from_thread( + func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object +) -> T_Retval: + f: concurrent.futures.Future[T_Retval] = asyncio.run_coroutine_threadsafe( + func(*args), threadlocals.loop + ) + return f.result() + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> "BlockingPortal": + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._loop = get_running_loop() + + def _spawn_task_from_thread( + self, + func: Callable, + args: tuple, + kwargs: Dict[str, Any], + name: object, + future: Future, + ) -> None: + run_sync_from_thread( + partial(self._task_group.start_soon, name=name), + self._call_func, + func, + args, + kwargs, + future, + loop=self._loop, + ) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class StreamReaderWrapper(abc.ByteReceiveStream): + _stream: asyncio.StreamReader + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._stream.read(max_bytes) + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + self._stream.feed_eof() + + +@dataclass(eq=False) +class StreamWriterWrapper(abc.ByteSendStream): + _stream: asyncio.StreamWriter + + async def send(self, item: bytes) -> None: + self._stream.write(item) + await self._stream.drain() + + async def aclose(self) -> None: + self._stream.close() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: 
asyncio.subprocess.Process + _stdin: Optional[StreamWriterWrapper] + _stdout: Optional[StreamReaderWrapper] + _stderr: Optional[StreamReaderWrapper] + + async def aclose(self) -> None: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + await self.wait() + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: int) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> Optional[int]: + return self._process.returncode + + @property + def stdin(self) -> Optional[abc.ByteSendStream]: + return self._stdin + + @property + def stdout(self) -> Optional[abc.ByteReceiveStream]: + return self._stdout + + @property + def stderr(self) -> Optional[abc.ByteReceiveStream]: + return self._stderr + + +async def open_process( + command: Union[str, bytes, Sequence[Union[str, bytes]]], + *, + shell: bool, + stdin: Union[int, IO[Any], None], + stdout: Union[int, IO[Any], None], + stderr: Union[int, IO[Any], None], + cwd: Union[str, bytes, PathLike, None] = None, + env: Optional[Mapping[str, str]] = None, + start_new_session: bool = False, +) -> Process: + await checkpoint() + if shell: + process = await asyncio.create_subprocess_shell( + cast(Union[str, bytes], command), + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + else: + process = await asyncio.create_subprocess_exec( + *command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + + stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None + stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None + stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + +def _forcibly_shutdown_process_pool_on_exit( + workers: Set[Process], _task: object +) -> None: + """ + Forcibly shuts down worker processes belonging to this event loop.""" + child_watcher: Optional[asyncio.AbstractChildWatcher] + try: + child_watcher = asyncio.get_event_loop_policy().get_child_watcher() + except NotImplementedError: + child_watcher = None + + # Close as much as possible (w/o async/await) to avoid warnings + for process in workers: + if process.returncode is None: + continue + + process._stdin._stream._transport.close() # type: ignore[union-attr] + process._stdout._stream._transport.close() # type: ignore[union-attr] + process._stderr._stream._transport.close() # type: ignore[union-attr] + process.kill() + if child_watcher: + child_watcher.remove_child_handler(process.pid) + + +async def _shutdown_process_pool_on_exit(workers: Set[Process]) -> None: + """ + Shuts down worker processes belonging to this event loop. + + NOTE: this only works when the event loop was started using asyncio.run() or anyio.run(). 
+ + """ + process: Process + try: + await sleep(math.inf) + except asyncio.CancelledError: + for process in workers: + if process.returncode is None: + process.kill() + + for process in workers: + await process.aclose() + + +def setup_process_pool_exit_at_shutdown(workers: Set[Process]) -> None: + kwargs = {"name": "AnyIO process pool shutdown task"} if _native_task_names else {} + create_task(_shutdown_process_pool_on_exit(workers), **kwargs) + find_root_task().add_done_callback( + partial(_forcibly_shutdown_process_pool_on_exit, workers) + ) + + +# +# Sockets and networking +# + + +class StreamProtocol(asyncio.Protocol): + read_queue: Deque[bytes] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Optional[Exception] = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque() + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + cast(asyncio.Transport, transport).set_write_buffer_limits(0) + + def connection_lost(self, exc: Optional[Exception]) -> None: + if exc: + self.exception = BrokenResourceError() + self.exception.__cause__ = exc + + self.read_event.set() + self.write_event.set() + + def data_received(self, data: bytes) -> None: + self.read_queue.append(data) + self.read_event.set() + + def eof_received(self) -> Optional[bool]: + self.read_event.set() + return True + + def pause_writing(self) -> None: + self.write_event = asyncio.Event() + + def resume_writing(self) -> None: + self.write_event.set() + + +class DatagramProtocol(asyncio.DatagramProtocol): + read_queue: Deque[Tuple[bytes, IPSockAddrType]] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Optional[Exception] = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque(maxlen=100) # arbitrary value + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + + def connection_lost(self, exc: Optional[Exception]) -> None: + self.read_event.set() + self.write_event.set() + + def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: + addr = convert_ipv6_sockaddr(addr) + self.read_queue.append((data, addr)) + self.read_event.set() + + def error_received(self, exc: Exception) -> None: + self.exception = exc + + def pause_writing(self) -> None: + self.write_event.clear() + + def resume_writing(self) -> None: + self.write_event.set() + + +class SocketStream(abc.SocketStream): + def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + await checkpoint() + + if ( + not self._protocol.read_event.is_set() + and not self._transport.is_closing() + ): + self._transport.resume_reading() + await self._protocol.read_event.wait() + self._transport.pause_reading() + + try: + chunk = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + elif self._protocol.exception: + raise self._protocol.exception + else: + raise EndOfStream from None + + if len(chunk) > max_bytes: + # Split the oversized chunk + chunk, leftover = chunk[:max_bytes], chunk[max_bytes:] + 
self._protocol.read_queue.appendleft(leftover) + + # If the read queue is empty, clear the flag so that the next call will block until + # data is available + if not self._protocol.read_queue: + self._protocol.read_event.clear() + + return chunk + + async def send(self, item: bytes) -> None: + with self._send_guard: + await checkpoint() + + if self._closed: + raise ClosedResourceError + elif self._protocol.exception is not None: + raise self._protocol.exception + + try: + self._transport.write(item) + except RuntimeError as exc: + if self._transport.is_closing(): + raise BrokenResourceError from exc + else: + raise + + await self._protocol.write_event.wait() + + async def send_eof(self) -> None: + try: + self._transport.write_eof() + except OSError: + pass + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + try: + self._transport.write_eof() + except OSError: + pass + + self._transport.close() + await sleep(0) + self._transport.abort() + + +class UNIXSocketStream(abc.SocketStream): + _receive_future: Optional[asyncio.Future] = None + _send_future: Optional[asyncio.Future] = None + _closing = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._receive_future + loop.remove_reader(self.__raw_socket) + + f = self._receive_future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._send_future + loop.remove_writer(self.__raw_socket) + + f = self._send_future = asyncio.Future() + self._loop.add_writer(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + async def send_eof(self) -> None: + with self._send_guard: + self._raw_socket.shutdown(socket.SHUT_WR) + + async def receive(self, max_bytes: int = 65536) -> bytes: + loop = get_running_loop() + await checkpoint() + with self._receive_guard: + while True: + try: + data = self.__raw_socket.recv(max_bytes) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await checkpoint() + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = self.__raw_socket.send(item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + view = view[bytes_sent:] + + async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + loop = get_running_loop() + fds = array.array("i") + await checkpoint() + with self._receive_guard: + 
while True: + try: + message, ancdata, flags, addr = self.__raw_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds( + self, message: bytes, fds: Collection[Union[int, IOBase]] + ) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + loop = get_running_loop() + filenos: List[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await checkpoint() + with self._send_guard: + while True: + try: + # The ignore can be removed after mypy picks up + # https://github.com/python/typeshed/pull/5545 + self.__raw_socket.sendmsg( + [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] + ) + break + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + if not self._closing: + self._closing = True + if self.__raw_socket.fileno() != -1: + self.__raw_socket.close() + + if self._receive_future: + self._receive_future.set_result(None) + if self._send_future: + self._send_future.set_result(None) + + +class TCPSocketListener(abc.SocketListener): + _accept_scope: Optional[CancelScope] = None + _closed = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = cast(asyncio.BaseEventLoop, get_running_loop()) + self._accept_guard = ResourceGuard("accepting connections from") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + async def accept(self) -> abc.SocketStream: + if self._closed: + raise ClosedResourceError + + with self._accept_guard: + await checkpoint() + with CancelScope() as self._accept_scope: + try: + client_sock, _addr = await self._loop.sock_accept(self._raw_socket) + except asyncio.CancelledError: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + if self._closed: + raise ClosedResourceError from None + + raise + finally: + self._accept_scope = None + + client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + transport, protocol = await self._loop.connect_accepted_socket( + StreamProtocol, client_sock + ) + return SocketStream(cast(asyncio.Transport, transport), protocol) + + async def aclose(self) -> None: + if self._closed: + return + + self._closed = True + if self._accept_scope: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + self._accept_scope.cancel() + await sleep(0) + + self._raw_socket.close() + + +class 
UNIXSocketListener(abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._accept_guard = ResourceGuard("accepting connections from") + self._closed = False + + async def accept(self) -> abc.SocketStream: + await checkpoint() + with self._accept_guard: + while True: + try: + client_sock, _ = self.__raw_socket.accept() + client_sock.setblocking(False) + return UNIXSocketStream(client_sock) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback( + lambda _: self._loop.remove_reader(self.__raw_socket) + ) + await f + except OSError as exc: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + self._closed = True + self.__raw_socket.close() + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + +class UDPSocket(abc.UDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> Tuple[bytes, IPSockAddrType]: + with self._receive_guard: + await checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + return self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + await checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(*item) + + +class ConnectedUDPSocket(abc.ConnectedUDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> bytes: + with self._receive_guard: + await checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + packet = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + return packet[0] + + async def send(self, item: bytes) -> None: + with self._send_guard: + await checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + 
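+ # Editor's note: ClosedResourceError above reports a local aclose(); the
+ # is_closing() check below catches a transport torn down by the event loop
+ # (e.g. after an error), which is surfaced as BrokenResourceError instead.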
elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(item) + + +async def connect_tcp( + host: str, port: int, local_addr: Optional[Tuple[str, int]] = None +) -> SocketStream: + transport, protocol = cast( + Tuple[asyncio.Transport, StreamProtocol], + await get_running_loop().create_connection( + StreamProtocol, host, port, local_addr=local_addr + ), + ) + transport.pause_reading() + return SocketStream(transport, protocol) + + +async def connect_unix(path: str) -> UNIXSocketStream: + await checkpoint() + loop = get_running_loop() + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + while True: + try: + raw_socket.connect(path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return UNIXSocketStream(raw_socket) + + +async def create_udp_socket( + family: socket.AddressFamily, + local_address: Optional[IPSockAddrType], + remote_address: Optional[IPSockAddrType], + reuse_port: bool, +) -> Union[UDPSocket, ConnectedUDPSocket]: + result = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, + local_addr=local_address, + remote_addr=remote_address, + family=family, + reuse_port=reuse_port, + ) + transport = cast(asyncio.DatagramTransport, result[0]) + protocol = result[1] + if protocol.exception: + transport.close() + raise protocol.exception + + if not remote_address: + return UDPSocket(transport, protocol) + else: + return ConnectedUDPSocket(transport, protocol) + + +async def getaddrinfo( + host: Union[bytes, str], + port: Union[str, int, None], + *, + family: Union[int, AddressFamily] = 0, + type: Union[int, SocketKind] = 0, + proto: int = 0, + flags: int = 0, +) -> GetAddrInfoReturnType: + # https://github.com/python/typeshed/pull/4304 + result = await get_running_loop().getaddrinfo( + host, port, family=family, type=type, proto=proto, flags=flags + ) + return cast(GetAddrInfoReturnType, result) + + +async def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Tuple[str, str]: + return await get_running_loop().getnameinfo(sockaddr, flags) + + +_read_events: RunVar[Dict[Any, asyncio.Event]] = RunVar("read_events") +_write_events: RunVar[Dict[Any, asyncio.Event]] = RunVar("write_events") + + +async def wait_socket_readable(sock: socket.socket) -> None: + await checkpoint() + try: + read_events = _read_events.get() + except LookupError: + read_events = {} + _read_events.set(read_events) + + if read_events.get(sock): + raise BusyResourceError("reading from") from None + + loop = get_running_loop() + event = read_events[sock] = asyncio.Event() + loop.add_reader(sock, event.set) + try: + await event.wait() + finally: + if read_events.pop(sock, None) is not None: + loop.remove_reader(sock) + readable = True + else: + readable = False + + if not readable: + raise ClosedResourceError + + +async def wait_socket_writable(sock: socket.socket) -> None: + await checkpoint() + try: + write_events = _write_events.get() + except LookupError: + write_events = {} + _write_events.set(write_events) + + if write_events.get(sock): + raise BusyResourceError("writing to") from None + + loop = get_running_loop() + event = write_events[sock] = asyncio.Event() + loop.add_writer(sock.fileno(), event.set) + try: + await event.wait() + finally: + if write_events.pop(sock, None) is not None: + loop.remove_writer(sock) + writable = 
True + else: + writable = False + + if not writable: + raise ClosedResourceError + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> "Event": + return object.__new__(cls) + + def __init__(self) -> None: + self._event = asyncio.Event() + + def set(self) -> DeprecatedAwaitable: + self._event.set() + return DeprecatedAwaitable(self.set) + + def is_set(self) -> bool: + return self._event.is_set() + + async def wait(self) -> None: + if await self._event.wait(): + await checkpoint() + + def statistics(self) -> EventStatistics: + return EventStatistics(len(self._event._waiters)) # type: ignore[attr-defined] + + +class CapacityLimiter(BaseCapacityLimiter): + _total_tokens: float = 0 + + def __new__(cls, total_tokens: float) -> "CapacityLimiter": + return object.__new__(cls) + + def __init__(self, total_tokens: float): + self._borrowers: Set[Any] = set() + self._wait_queue: Dict[Any, asyncio.Event] = OrderedDict() + self.total_tokens = total_tokens + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.release() + + @property + def total_tokens(self) -> float: + return self._total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and not math.isinf(value): + raise TypeError("total_tokens must be an int or math.inf") + if value < 1: + raise ValueError("total_tokens must be >= 1") + + old_value = self._total_tokens + self._total_tokens = value + events = [] + for event in self._wait_queue.values(): + if value <= old_value: + break + + if not event.is_set(): + events.append(event) + old_value += 1 + + for event in events: + event.set() + + @property + def borrowed_tokens(self) -> int: + return len(self._borrowers) + + @property + def available_tokens(self) -> float: + return self._total_tokens - len(self._borrowers) + + def acquire_nowait(self) -> DeprecatedAwaitable: + self.acquire_on_behalf_of_nowait(current_task()) + return DeprecatedAwaitable(self.acquire_nowait) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + if borrower in self._borrowers: + raise RuntimeError( + "this borrower is already holding one of this CapacityLimiter's " + "tokens" + ) + + if self._wait_queue or len(self._borrowers) >= self._total_tokens: + raise WouldBlock + + self._borrowers.add(borrower) + return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait) + + async def acquire(self) -> None: + return await self.acquire_on_behalf_of(current_task()) + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await checkpoint_if_cancelled() + try: + self.acquire_on_behalf_of_nowait(borrower) + except WouldBlock: + event = asyncio.Event() + self._wait_queue[borrower] = event + try: + await event.wait() + except BaseException: + self._wait_queue.pop(borrower, None) + raise + + self._borrowers.add(borrower) + else: + try: + await cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def release(self) -> None: + self.release_on_behalf_of(current_task()) + + def release_on_behalf_of(self, borrower: object) -> None: + try: + self._borrowers.remove(borrower) + except KeyError: + raise RuntimeError( + "this borrower isn't holding any of this CapacityLimiter's " "tokens" + ) from None + + # Notify the next task in line if this limiter has free capacity now + if self._wait_queue and len(self._borrowers) 
< self._total_tokens: + event = self._wait_queue.popitem()[1] + event.set() + + def statistics(self) -> CapacityLimiterStatistics: + return CapacityLimiterStatistics( + self.borrowed_tokens, + self.total_tokens, + tuple(self._borrowers), + len(self._wait_queue), + ) + + +_default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter") + + +def current_default_thread_limiter() -> CapacityLimiter: + try: + return _default_thread_limiter.get() + except LookupError: + limiter = CapacityLimiter(40) + _default_thread_limiter.set(limiter) + return limiter + + +# +# Operating system signals +# + + +class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]): + def __init__(self, signals: Tuple[int, ...]): + self._signals = signals + self._loop = get_running_loop() + self._signal_queue: Deque[int] = deque() + self._future: asyncio.Future = asyncio.Future() + self._handled_signals: Set[int] = set() + + def _deliver(self, signum: int) -> None: + self._signal_queue.append(signum) + if not self._future.done(): + self._future.set_result(None) + + def __enter__(self) -> "_SignalReceiver": + for sig in set(self._signals): + self._loop.add_signal_handler(sig, self._deliver, sig) + self._handled_signals.add(sig) + + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + for sig in self._handled_signals: + self._loop.remove_signal_handler(sig) + return None + + def __aiter__(self) -> "_SignalReceiver": + return self + + async def __anext__(self) -> int: + await checkpoint() + if not self._signal_queue: + self._future = asyncio.Future() + await self._future + + return self._signal_queue.popleft() + + +def open_signal_receiver(*signals: int) -> _SignalReceiver: + return _SignalReceiver(signals) + + +# +# Testing and debugging +# + + +def _create_task_info(task: asyncio.Task) -> TaskInfo: + task_state = _task_states.get(task) + if task_state is None: + name = task.get_name() if _native_task_names else None + parent_id = None + else: + name = task_state.name + parent_id = task_state.parent_id + + return TaskInfo(id(task), parent_id, name, get_coro(task)) + + +def get_current_task() -> TaskInfo: + return _create_task_info(current_task()) # type: ignore[arg-type] + + +def get_running_tasks() -> List[TaskInfo]: + return [_create_task_info(task) for task in all_tasks() if not task.done()] + + +async def wait_all_tasks_blocked() -> None: + await checkpoint() + this_task = current_task() + while True: + for task in all_tasks(): + if task is this_task: + continue + + if task._fut_waiter is None or task._fut_waiter.done(): # type: ignore[attr-defined] + await sleep(0.1) + break + else: + return + + +class TestRunner(abc.TestRunner): + def __init__( + self, + debug: bool = False, + use_uvloop: bool = False, + policy: Optional[asyncio.AbstractEventLoopPolicy] = None, + ): + self._exceptions: List[BaseException] = [] + _maybe_set_event_loop_policy(policy, use_uvloop) + self._loop = asyncio.new_event_loop() + self._loop.set_debug(debug) + self._loop.set_exception_handler(self._exception_handler) + asyncio.set_event_loop(self._loop) + + def _cancel_all_tasks(self) -> None: + to_cancel = all_tasks(self._loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + self._loop.run_until_complete( + asyncio.gather(*to_cancel, return_exceptions=True) + ) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + raise 
cast(BaseException, task.exception()) + + def _exception_handler( + self, loop: asyncio.AbstractEventLoop, context: Dict[str, Any] + ) -> None: + if isinstance(context.get("exception"), Exception): + self._exceptions.append(context["exception"]) + else: + loop.default_exception_handler(context) + + def _raise_async_exceptions(self) -> None: + # Re-raise any exceptions raised in asynchronous callbacks + if self._exceptions: + exceptions, self._exceptions = self._exceptions, [] + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise ExceptionGroup(exceptions) + + def close(self) -> None: + try: + self._cancel_all_tasks() + self._loop.run_until_complete(self._loop.shutdown_asyncgens()) + finally: + asyncio.set_event_loop(None) + self._loop.close() + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: Dict[str, Any], + ) -> Iterable[T_Retval]: + async def fixture_runner() -> None: + agen = fixture_func(**kwargs) + try: + retval = await agen.asend(None) + self._raise_async_exceptions() + except BaseException as exc: + f.set_exception(exc) + return + else: + f.set_result(retval) + + await event.wait() + try: + await agen.asend(None) + except StopAsyncIteration: + pass + else: + await agen.aclose() + raise RuntimeError("Async generator fixture did not stop") + + f = self._loop.create_future() + event = asyncio.Event() + fixture_task = self._loop.create_task(fixture_runner()) + self._loop.run_until_complete(f) + yield f.result() + event.set() + self._loop.run_until_complete(fixture_task) + self._raise_async_exceptions() + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: Dict[str, Any], + ) -> T_Retval: + retval = self._loop.run_until_complete(fixture_func(**kwargs)) + self._raise_async_exceptions() + return retval + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: Dict[str, Any] + ) -> None: + try: + self._loop.run_until_complete(test_func(**kwargs)) + except Exception as exc: + self._exceptions.append(exc) + + self._raise_async_exceptions() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_backends/_trio.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_backends/_trio.py new file mode 100644 index 00000000..cf2aaece --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_backends/_trio.py @@ -0,0 +1,988 @@ +import array +import math +import socket +from concurrent.futures import Future +from contextvars import copy_context +from dataclasses import dataclass +from functools import partial +from io import IOBase +from os import PathLike +from signal import Signals +from types import TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + AsyncGenerator, + Awaitable, + Callable, + Collection, + ContextManager, + Coroutine, + Deque, + Dict, + Generic, + Iterable, + List, + Mapping, + NoReturn, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) + +import sniffio +import trio.from_thread +from outcome import Error, Outcome, Value +from trio.socket import SocketType as TrioSocketType +from trio.to_thread import run_sync + +from .. 
import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc +from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable, T +from .._core._eventloop import claim_worker_thread +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, +) +from .._core._exceptions import ExceptionGroup as BaseExceptionGroup +from .._core._sockets import convert_ipv6_sockaddr +from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import ResourceGuard +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType + +if TYPE_CHECKING: + from trio_typing import TaskStatus + +try: + from trio import lowlevel as trio_lowlevel +except ImportError: + from trio import hazmat as trio_lowlevel # type: ignore[no-redef] + from trio.hazmat import wait_readable, wait_writable +else: + from trio.lowlevel import wait_readable, wait_writable + +try: + trio_open_process = trio_lowlevel.open_process # type: ignore[attr-defined] +except AttributeError: + from trio import open_process as trio_open_process + +T_Retval = TypeVar("T_Retval") +T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType) + + +# +# Event loop +# + +run = trio.run +current_token = trio.lowlevel.current_trio_token +RunVar = trio.lowlevel.RunVar + + +# +# Miscellaneous +# + +sleep = trio.sleep + + +# +# Timeouts and cancellation +# + + +class CancelScope(BaseCancelScope): + def __new__( + cls, original: Optional[trio.CancelScope] = None, **kwargs: object + ) -> "CancelScope": + return object.__new__(cls) + + def __init__( + self, original: Optional[trio.CancelScope] = None, **kwargs: Any + ) -> None: + self.__original = original or trio.CancelScope(**kwargs) + + def __enter__(self) -> "CancelScope": + self.__original.__enter__() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + return self.__original.__exit__(exc_type, exc_val, exc_tb) + + def cancel(self) -> DeprecatedAwaitable: + self.__original.cancel() + return DeprecatedAwaitable(self.cancel) + + @property + def deadline(self) -> float: + return self.__original.deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self.__original.deadline = value + + @property + def cancel_called(self) -> bool: + return self.__original.cancel_called + + @property + def shield(self) -> bool: + return self.__original.shield + + @shield.setter + def shield(self, value: bool) -> None: + self.__original.shield = value + + +CancelledError = trio.Cancelled +checkpoint = trio.lowlevel.checkpoint +checkpoint_if_cancelled = trio.lowlevel.checkpoint_if_cancelled +cancel_shielded_checkpoint = trio.lowlevel.cancel_shielded_checkpoint +current_effective_deadline = trio.current_effective_deadline +current_time = trio.current_time + + +# +# Task groups +# + + +class ExceptionGroup(BaseExceptionGroup, trio.MultiError): + pass + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self._active = False + self._nursery_manager = trio.open_nursery() + self.cancel_scope = None # type: ignore[assignment] + + async def __aenter__(self) -> "TaskGroup": + self._active = True + self._nursery = await self._nursery_manager.__aenter__() + self.cancel_scope = CancelScope(self._nursery.cancel_scope) + return self + + async def __aexit__( + self, + exc_type: 
Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + try: + return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) + except trio.MultiError as exc: + raise ExceptionGroup(exc.exceptions) from None + finally: + self._active = False + + def start_soon(self, func: Callable, *args: object, name: object = None) -> None: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + self._nursery.start_soon(func, *args, name=name) + + async def start( + self, func: Callable[..., Coroutine], *args: object, name: object = None + ) -> object: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + return await self._nursery.start(func, *args, name=name) + + +# +# Threads +# + + +async def run_sync_in_worker_thread( + func: Callable[..., T_Retval], + *args: object, + cancellable: bool = False, + limiter: Optional[trio.CapacityLimiter] = None, +) -> T_Retval: + def wrapper() -> T_Retval: + with claim_worker_thread("trio"): + return func(*args) + + # TODO: remove explicit context copying when trio 0.20 is the minimum requirement + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, None) + return await run_sync( + context.run, wrapper, cancellable=cancellable, limiter=limiter + ) + + +# TODO: remove this workaround when trio 0.20 is the minimum requirement +def run_async_from_thread( + fn: Callable[..., Awaitable[T_Retval]], *args: Any +) -> T_Retval: + async def wrapper() -> T_Retval: + retval: T_Retval + + async def inner() -> None: + nonlocal retval + __tracebackhide__ = True + retval = await fn(*args) + + async with trio.open_nursery() as n: + context.run(n.start_soon, inner) + + __tracebackhide__ = True + return retval + + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, "trio") + return trio.from_thread.run(wrapper) + + +def run_sync_from_thread(fn: Callable[..., T_Retval], *args: Any) -> T_Retval: + # TODO: remove explicit context copying when trio 0.20 is the minimum requirement + retval = trio.from_thread.run_sync(copy_context().run, fn, *args) + return cast(T_Retval, retval) + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> "BlockingPortal": + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._token = trio.lowlevel.current_trio_token() + + def _spawn_task_from_thread( + self, + func: Callable, + args: tuple, + kwargs: Dict[str, Any], + name: object, + future: Future, + ) -> None: + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, "trio") + trio.from_thread.run_sync( + context.run, + partial(self._task_group.start_soon, name=name), + self._call_func, + func, + args, + kwargs, + future, + trio_token=self._token, + ) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class ReceiveStreamWrapper(abc.ByteReceiveStream): + _stream: trio.abc.ReceiveStream + + async def receive(self, max_bytes: Optional[int] = None) -> bytes: + try: + data = await self._stream.receive_some(max_bytes) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class SendStreamWrapper(abc.ByteSendStream): + 
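+ # Editor's note: like ReceiveStreamWrapper above, this dataclass adapts a
+ # raw trio stream (here a subprocess pipe) to anyio's byte-stream ABC,
+ # translating trio's ClosedResourceError/BrokenResourceError into the
+ # anyio equivalents.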
_stream: trio.abc.SendStream + + async def send(self, item: bytes) -> None: + try: + await self._stream.send_all(item) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: trio.Process + _stdin: Optional[abc.ByteSendStream] + _stdout: Optional[abc.ByteReceiveStream] + _stderr: Optional[abc.ByteReceiveStream] + + async def aclose(self) -> None: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + await self.wait() + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: Signals) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> Optional[int]: + return self._process.returncode + + @property + def stdin(self) -> Optional[abc.ByteSendStream]: + return self._stdin + + @property + def stdout(self) -> Optional[abc.ByteReceiveStream]: + return self._stdout + + @property + def stderr(self) -> Optional[abc.ByteReceiveStream]: + return self._stderr + + +async def open_process( + command: Union[str, bytes, Sequence[Union[str, bytes]]], + *, + shell: bool, + stdin: Union[int, IO[Any], None], + stdout: Union[int, IO[Any], None], + stderr: Union[int, IO[Any], None], + cwd: Union[str, bytes, PathLike, None] = None, + env: Optional[Mapping[str, str]] = None, + start_new_session: bool = False, +) -> Process: + process = await trio_open_process( + command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + shell=shell, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) + stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None + stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None + stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + +class _ProcessPoolShutdownInstrument(trio.abc.Instrument): + def after_run(self) -> None: + super().after_run() + + +current_default_worker_process_limiter: RunVar = RunVar( + "current_default_worker_process_limiter" +) + + +async def _shutdown_process_pool(workers: Set[Process]) -> None: + process: Process + try: + await sleep(math.inf) + except trio.Cancelled: + for process in workers: + if process.returncode is None: + process.kill() + + with CancelScope(shield=True): + for process in workers: + await process.aclose() + + +def setup_process_pool_exit_at_shutdown(workers: Set[Process]) -> None: + trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) + + +# +# Sockets and networking +# + + +class _TrioSocketMixin(Generic[T_SockAddr]): + def __init__(self, trio_socket: TrioSocketType) -> None: + self._trio_socket = trio_socket + self._closed = False + + def _check_closed(self) -> None: + if self._closed: + raise ClosedResourceError + if self._trio_socket.fileno() < 0: + raise BrokenResourceError + + @property + def _raw_socket(self) -> socket.socket: + return self._trio_socket._sock # type: ignore[attr-defined] + + async def aclose(self) -> None: + if self._trio_socket.fileno() >= 0: + self._closed = True + 
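+ # Editor's note: _closed is set before the close() call so that
+ # _convert_socket_error (below) can distinguish a deliberate local aclose()
+ # from an externally broken socket when a later operation fails.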
self._trio_socket.close() + + def _convert_socket_error(self, exc: BaseException) -> "NoReturn": + if isinstance(exc, trio.ClosedResourceError): + raise ClosedResourceError from exc + elif self._trio_socket.fileno() < 0 and self._closed: + raise ClosedResourceError from None + elif isinstance(exc, OSError): + raise BrokenResourceError from exc + else: + raise exc + + +class SocketStream(_TrioSocketMixin, abc.SocketStream): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + try: + data = await self._trio_socket.recv(max_bytes) + except BaseException as exc: + self._convert_socket_error(exc) + + if data: + return data + else: + raise EndOfStream + + async def send(self, item: bytes) -> None: + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = await self._trio_socket.send(view) + except BaseException as exc: + self._convert_socket_error(exc) + + view = view[bytes_sent:] + + async def send_eof(self) -> None: + self._trio_socket.shutdown(socket.SHUT_WR) + + +class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): + async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + fds = array.array("i") + await checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = await self._trio_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BaseException as exc: + self._convert_socket_error(exc) + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds( + self, message: bytes, fds: Collection[Union[int, IOBase]] + ) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + filenos: List[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await checkpoint() + with self._send_guard: + while True: + try: + await self._trio_socket.sendmsg( + [message], + [ + ( + socket.SOL_SOCKET, + socket.SCM_RIGHTS, # type: ignore[list-item] + fdarray, + ) + ], + ) + break + except BaseException as exc: + self._convert_socket_error(exc) + + +class TCPSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> SocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + 
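+ # Editor's note: TCP_NODELAY disables Nagle's algorithm on each accepted
+ # connection, mirroring the asyncio backend's TCPSocketListener.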
return SocketStream(trio_socket) + + +class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> UNIXSocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + return UNIXSocketStream(trio_socket) + + +class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> Tuple[bytes, IPSockAddrType]: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, convert_ipv6_sockaddr(addr) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +async def connect_tcp( + host: str, port: int, local_address: Optional[IPSockAddrType] = None +) -> SocketStream: + family = socket.AF_INET6 if ":" in host else socket.AF_INET + trio_socket = trio.socket.socket(family) + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if local_address: + await trio_socket.bind(local_address) + + try: + await trio_socket.connect((host, port)) + except BaseException: + trio_socket.close() + raise + + return SocketStream(trio_socket) + + +async def connect_unix(path: str) -> UNIXSocketStream: + trio_socket = trio.socket.socket(socket.AF_UNIX) + try: + await trio_socket.connect(path) + except BaseException: + trio_socket.close() + raise + + return UNIXSocketStream(trio_socket) + + +async def create_udp_socket( + family: socket.AddressFamily, + local_address: Optional[IPSockAddrType], + remote_address: Optional[IPSockAddrType], + reuse_port: bool, +) -> Union[UDPSocket, ConnectedUDPSocket]: + trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) + + if reuse_port: + trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + if local_address: + await trio_socket.bind(local_address) + + if remote_address: + await trio_socket.connect(remote_address) + return ConnectedUDPSocket(trio_socket) + else: + return UDPSocket(trio_socket) + + +getaddrinfo = trio.socket.getaddrinfo +getnameinfo = trio.socket.getnameinfo + + +async def wait_socket_readable(sock: socket.socket) -> None: + try: + await wait_readable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + 
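+ # Editor's note: trio's exception is translated into anyio's backend-neutral
+ # BusyResourceError so callers can catch one type on either backend. Hedged
+ # usage sketch (`sock` is an illustrative non-blocking socket object):
+ #
+ #     await anyio.wait_socket_readable(sock)  # returns once recv() won't block
+ #     data = sock.recv(65536)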
raise BusyResourceError("reading from") from None + + +async def wait_socket_writable(sock: socket.socket) -> None: + try: + await wait_writable(sock) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("writing to") from None + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> "Event": + return object.__new__(cls) + + def __init__(self) -> None: + self.__original = trio.Event() + + def is_set(self) -> bool: + return self.__original.is_set() + + async def wait(self) -> None: + return await self.__original.wait() + + def statistics(self) -> EventStatistics: + orig_statistics = self.__original.statistics() + return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) + + def set(self) -> DeprecatedAwaitable: + self.__original.set() + return DeprecatedAwaitable(self.set) + + +class CapacityLimiter(BaseCapacityLimiter): + def __new__(cls, *args: object, **kwargs: object) -> "CapacityLimiter": + return object.__new__(cls) + + def __init__( + self, *args: Any, original: Optional[trio.CapacityLimiter] = None + ) -> None: + self.__original = original or trio.CapacityLimiter(*args) + + async def __aenter__(self) -> None: + return await self.__original.__aenter__() + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + return await self.__original.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + return self.__original.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + self.__original.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + return self.__original.borrowed_tokens + + @property + def available_tokens(self) -> float: + return self.__original.available_tokens + + def acquire_nowait(self) -> DeprecatedAwaitable: + self.__original.acquire_nowait() + return DeprecatedAwaitable(self.acquire_nowait) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + self.__original.acquire_on_behalf_of_nowait(borrower) + return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait) + + async def acquire(self) -> None: + await self.__original.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self.__original.acquire_on_behalf_of(borrower) + + def release(self) -> None: + return self.__original.release() + + def release_on_behalf_of(self, borrower: object) -> None: + return self.__original.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + orig = self.__original.statistics() + return CapacityLimiterStatistics( + borrowed_tokens=orig.borrowed_tokens, + total_tokens=orig.total_tokens, + borrowers=orig.borrowers, + tasks_waiting=orig.tasks_waiting, + ) + + +_capacity_limiter_wrapper: RunVar = RunVar("_capacity_limiter_wrapper") + + +def current_default_thread_limiter() -> CapacityLimiter: + try: + return _capacity_limiter_wrapper.get() + except LookupError: + limiter = CapacityLimiter( + original=trio.to_thread.current_default_thread_limiter() + ) + _capacity_limiter_wrapper.set(limiter) + return limiter + + +# +# Signal handling +# + + +class _SignalReceiver(DeprecatedAsyncContextManager[T]): + def __init__(self, cm: ContextManager[T]): + self._cm = cm + + def __enter__(self) -> T: + return self._cm.__enter__() + + def __exit__( + self, + exc_type: 
Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + +def open_signal_receiver(*signals: Signals) -> _SignalReceiver: + cm = trio.open_signal_receiver(*signals) + return _SignalReceiver(cm) + + +# +# Testing and debugging +# + + +def get_current_task() -> TaskInfo: + task = trio_lowlevel.current_task() + + parent_id = None + if task.parent_nursery and task.parent_nursery.parent_task: + parent_id = id(task.parent_nursery.parent_task) + + return TaskInfo(id(task), parent_id, task.name, task.coro) + + +def get_running_tasks() -> List[TaskInfo]: + root_task = trio_lowlevel.current_root_task() + task_infos = [TaskInfo(id(root_task), None, root_task.name, root_task.coro)] + nurseries = root_task.child_nurseries + while nurseries: + new_nurseries: List[trio.Nursery] = [] + for nursery in nurseries: + for task in nursery.child_tasks: + task_infos.append( + TaskInfo(id(task), id(nursery.parent_task), task.name, task.coro) + ) + new_nurseries.extend(task.child_nurseries) + + nurseries = new_nurseries + + return task_infos + + +def wait_all_tasks_blocked() -> Awaitable[None]: + import trio.testing + + return trio.testing.wait_all_tasks_blocked() + + +class TestRunner(abc.TestRunner): + def __init__(self, **options: Any) -> None: + from collections import deque + from queue import Queue + + self._call_queue: "Queue[Callable[..., object]]" = Queue() + self._result_queue: Deque[Outcome] = deque() + self._stop_event: Optional[trio.Event] = None + self._nursery: Optional[trio.Nursery] = None + self._options = options + + async def _trio_main(self) -> None: + self._stop_event = trio.Event() + async with trio.open_nursery() as self._nursery: + await self._stop_event.wait() + + async def _call_func( + self, func: Callable[..., Awaitable[object]], args: tuple, kwargs: dict + ) -> None: + try: + retval = await func(*args, **kwargs) + except BaseException as exc: + self._result_queue.append(Error(exc)) + else: + self._result_queue.append(Value(retval)) + + def _main_task_finished(self, outcome: object) -> None: + self._nursery = None + + def _get_nursery(self) -> trio.Nursery: + if self._nursery is None: + trio.lowlevel.start_guest_run( + self._trio_main, + run_sync_soon_threadsafe=self._call_queue.put, + done_callback=self._main_task_finished, + **self._options, + ) + while self._nursery is None: + self._call_queue.get()() + + return self._nursery + + def _call( + self, func: Callable[..., Awaitable[T_Retval]], *args: object, **kwargs: object + ) -> T_Retval: + self._get_nursery().start_soon(self._call_func, func, args, kwargs) + while not self._result_queue: + self._call_queue.get()() + + outcome = self._result_queue.pop() + return outcome.unwrap() + + def close(self) -> None: + if self._stop_event: + self._stop_event.set() + while self._nursery is not None: + self._call_queue.get()() + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: Dict[str, Any], + ) -> Iterable[T_Retval]: + async def fixture_runner(*, task_status: "TaskStatus") -> None: + agen = fixture_func(**kwargs) + retval = await agen.asend(None) + task_status.started(retval) + await teardown_event.wait() + try: + await agen.asend(None) + except StopAsyncIteration: + pass + else: + await agen.aclose() + raise RuntimeError("Async generator fixture did not stop") + + teardown_event = trio.Event() + fixture_value = self._call(lambda: 
self._get_nursery().start(fixture_runner)) + yield fixture_value + teardown_event.set() + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: Dict[str, Any], + ) -> T_Retval: + return self._call(fixture_func, **kwargs) + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: Dict[str, Any] + ) -> None: + self._call(test_func, **kwargs) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_compat.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_compat.py new file mode 100644 index 00000000..7062be5d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_compat.py @@ -0,0 +1,218 @@ +from abc import ABCMeta, abstractmethod +from contextlib import AbstractContextManager +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + AsyncContextManager, + Callable, + ContextManager, + Generator, + Generic, + Iterable, + List, + Optional, + Tuple, + Type, + TypeVar, + Union, + overload, +) +from warnings import warn + +if TYPE_CHECKING: + from ._testing import TaskInfo +else: + TaskInfo = object + +T = TypeVar("T") +AnyDeprecatedAwaitable = Union[ + "DeprecatedAwaitable", + "DeprecatedAwaitableFloat", + "DeprecatedAwaitableList[T]", + TaskInfo, +] + + +@overload +async def maybe_async(__obj: TaskInfo) -> TaskInfo: + ... + + +@overload +async def maybe_async(__obj: "DeprecatedAwaitableFloat") -> float: + ... + + +@overload +async def maybe_async(__obj: "DeprecatedAwaitableList[T]") -> List[T]: + ... + + +@overload +async def maybe_async(__obj: "DeprecatedAwaitable") -> None: + ... + + +async def maybe_async( + __obj: "AnyDeprecatedAwaitable[T]", +) -> Union[TaskInfo, float, List[T], None]: + """ + Await on the given object if necessary. + + This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and + methods were converted from coroutine functions into regular functions. + + Do **not** try to use this for any other purpose! + + :return: the result of awaiting on the object if coroutine, or the object itself otherwise + + .. versionadded:: 2.2 + + """ + return __obj._unwrap() + + +class _ContextManagerWrapper: + def __init__(self, cm: ContextManager[T]): + self._cm = cm + + async def __aenter__(self) -> T: + return self._cm.__enter__() + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + +def maybe_async_cm( + cm: Union[ContextManager[T], AsyncContextManager[T]] +) -> AsyncContextManager[T]: + """ + Wrap a regular context manager as an async one if necessary. + + This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and + methods were changed to return regular context managers instead of async ones. + + :param cm: a regular or async context manager + :return: an async context manager + + .. 
versionadded:: 2.2 + + """ + if not isinstance(cm, AbstractContextManager): + raise TypeError("Given object is not a context manager") + + return _ContextManagerWrapper(cm) + + +def _warn_deprecation( + awaitable: "AnyDeprecatedAwaitable[Any]", stacklevel: int = 1 +) -> None: + warn( + f'Awaiting on {awaitable._name}() is deprecated. Use "await ' + f'anyio.maybe_async({awaitable._name}(...))" if you have to support both AnyIO 2.x ' + f'and 3.x, or just remove the "await" if you are completely migrating to AnyIO 3+.', + DeprecationWarning, + stacklevel=stacklevel + 1, + ) + + +class DeprecatedAwaitable: + def __init__(self, func: Callable[..., "DeprecatedAwaitable"]): + self._name = f"{func.__module__}.{func.__qualname__}" + + def __await__(self) -> Generator[None, None, None]: + _warn_deprecation(self) + if False: + yield + + def __reduce__(self) -> Tuple[Type[None], Tuple[()]]: + return type(None), () + + def _unwrap(self) -> None: + return None + + +class DeprecatedAwaitableFloat(float): + def __new__( + cls, x: float, func: Callable[..., "DeprecatedAwaitableFloat"] + ) -> "DeprecatedAwaitableFloat": + return super().__new__(cls, x) + + def __init__(self, x: float, func: Callable[..., "DeprecatedAwaitableFloat"]): + self._name = f"{func.__module__}.{func.__qualname__}" + + def __await__(self) -> Generator[None, None, float]: + _warn_deprecation(self) + if False: + yield + + return float(self) + + def __reduce__(self) -> Tuple[Type[float], Tuple[float]]: + return float, (float(self),) + + def _unwrap(self) -> float: + return float(self) + + +class DeprecatedAwaitableList(List[T]): + def __init__( + self, + iterable: Iterable[T] = (), + *, + func: Callable[..., "DeprecatedAwaitableList[T]"], + ): + super().__init__(iterable) + self._name = f"{func.__module__}.{func.__qualname__}" + + def __await__(self) -> Generator[None, None, List[T]]: + _warn_deprecation(self) + if False: + yield + + return list(self) + + def __reduce__(self) -> Tuple[Type[List[T]], Tuple[List[T]]]: + return list, (list(self),) + + def _unwrap(self) -> List[T]: + return list(self) + + +class DeprecatedAsyncContextManager(Generic[T], metaclass=ABCMeta): + @abstractmethod + def __enter__(self) -> T: + pass + + @abstractmethod + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + pass + + async def __aenter__(self) -> T: + warn( + f"Using {self.__class__.__name__} as an async context manager has been deprecated. 
" + f'Use "async with anyio.maybe_async_cm(yourcontextmanager) as foo:" if you have to ' + f'support both AnyIO 2.x and 3.x, or just remove the "async" from "async with" if ' + f"you are completely migrating to AnyIO 3+.", + DeprecationWarning, + ) + return self.__enter__() + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + return self.__exit__(exc_type, exc_val, exc_tb) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_eventloop.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_eventloop.py new file mode 100644 index 00000000..f027ae50 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_eventloop.py @@ -0,0 +1,155 @@ +import math +import sys +import threading +from contextlib import contextmanager +from importlib import import_module +from typing import ( + Any, + Callable, + Coroutine, + Dict, + Generator, + Optional, + Tuple, + Type, + TypeVar, +) + +import sniffio + +# This must be updated when new backends are introduced +from ._compat import DeprecatedAwaitableFloat + +BACKENDS = "asyncio", "trio" + +T_Retval = TypeVar("T_Retval") +threadlocals = threading.local() + + +def run( + func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object, + backend: str = "asyncio", + backend_options: Optional[Dict[str, Any]] = None, +) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event loop. + + The current thread must not be already running an event loop. + + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param backend: name of the asynchronous event loop implementation – currently either + ``asyncio`` or ``trio`` + :param backend_options: keyword arguments to call the backend ``run()`` implementation with + (documented :ref:`here <backend options>`) + :return: the return value of the coroutine function + :raises RuntimeError: if an asynchronous event loop is already running in this thread + :raises LookupError: if the named backend is not found + + """ + try: + asynclib_name = sniffio.current_async_library() + except sniffio.AsyncLibraryNotFoundError: + pass + else: + raise RuntimeError(f"Already running {asynclib_name} in this thread") + + try: + asynclib = import_module(f"..._backends._{backend}", package=__name__) + except ImportError as exc: + raise LookupError(f"No such backend: {backend}") from exc + + token = None + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the async library + token = sniffio.current_async_library_cvar.set(backend) + + try: + backend_options = backend_options or {} + return asynclib.run(func, *args, **backend_options) + finally: + if token: + sniffio.current_async_library_cvar.reset(token) + + +async def sleep(delay: float) -> None: + """ + Pause the current task for the specified duration. + + :param delay: the duration, in seconds + + """ + return await get_asynclib().sleep(delay) + + +async def sleep_forever() -> None: + """ + Pause the current task until it's cancelled. + + This is a shortcut for ``sleep(math.inf)``. + + .. versionadded:: 3.1 + + """ + await sleep(math.inf) + + +async def sleep_until(deadline: float) -> None: + """ + Pause the current task until the given time. + + :param deadline: the absolute time to wake up at (according to the internal monotonic clock of + the event loop) + + .. 
versionadded:: 3.1 + + """ + now = current_time() + await sleep(max(deadline - now, 0)) + + +def current_time() -> DeprecatedAwaitableFloat: + """ + Return the current value of the event loop's internal clock. + + :return: the clock value (seconds) + + """ + return DeprecatedAwaitableFloat(get_asynclib().current_time(), current_time) + + +def get_all_backends() -> Tuple[str, ...]: + """Return a tuple of the names of all built-in backends.""" + return BACKENDS + + +def get_cancelled_exc_class() -> Type[BaseException]: + """Return the current async library's cancellation exception class.""" + return get_asynclib().CancelledError + + +# +# Private API +# + + +@contextmanager +def claim_worker_thread(backend: str) -> Generator[Any, None, None]: + module = sys.modules["anyio._backends._" + backend] + threadlocals.current_async_module = module + try: + yield + finally: + del threadlocals.current_async_module + + +def get_asynclib(asynclib_name: Optional[str] = None) -> Any: + if asynclib_name is None: + asynclib_name = sniffio.current_async_library() + + modulename = "anyio._backends._" + asynclib_name + try: + return sys.modules[modulename] + except KeyError: + return import_module(modulename) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_exceptions.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_exceptions.py new file mode 100644 index 00000000..db2bbcf5 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_exceptions.py @@ -0,0 +1,93 @@ +from traceback import format_exception +from typing import List + + +class BrokenResourceError(Exception): + """ + Raised when trying to use a resource that has been rendered unusable due to external causes + (e.g. a send stream whose peer has disconnected). + """ + + +class BrokenWorkerProcess(Exception): + """ + Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or otherwise + misbehaves. + """ + + +class BusyResourceError(Exception): + """Raised when two tasks are trying to read from or write to the same resource concurrently.""" + + def __init__(self, action: str): + super().__init__(f"Another task is already {action} this resource") + + +class ClosedResourceError(Exception): + """Raised when trying to use a resource that has been closed.""" + + +class DelimiterNotFound(Exception): + """ + Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + maximum number of bytes has been read without the delimiter being found. + """ + + def __init__(self, max_bytes: int) -> None: + super().__init__( + f"The delimiter was not found among the first {max_bytes} bytes" + ) + + +class EndOfStream(Exception): + """Raised when trying to read from a stream that has been closed from the other end.""" + + +class ExceptionGroup(BaseException): + """ + Raised when multiple exceptions have been raised in a task group. 
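+
+ An editor-added, illustrative sketch of handling it (``handle_exc`` is a
+ placeholder, not part of anyio)::
+
+ try:
+ async with anyio.create_task_group() as tg:
+ ...
+ except anyio.ExceptionGroup as excgrp:
+ for exc in excgrp.exceptions:
+ handle_exc(exc)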
+ + :var ~typing.Sequence[BaseException] exceptions: the sequence of exceptions raised together + """ + + SEPARATOR = "----------------------------\n" + + exceptions: List[BaseException] + + def __str__(self) -> str: + tracebacks = [ + "".join(format_exception(type(exc), exc, exc.__traceback__)) + for exc in self.exceptions + ] + return ( + f"{len(self.exceptions)} exceptions were raised in the task group:\n" + f"{self.SEPARATOR}{self.SEPARATOR.join(tracebacks)}" + ) + + def __repr__(self) -> str: + exception_reprs = ", ".join(repr(exc) for exc in self.exceptions) + return f"<{self.__class__.__name__}: {exception_reprs}>" + + +class IncompleteRead(Exception): + """ + Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + connection is closed before the requested amount of bytes has been read. + """ + + def __init__(self) -> None: + super().__init__( + "The stream was closed before the read operation could be completed" + ) + + +class TypedAttributeLookupError(LookupError): + """ + Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute is not + found and no default value has been given. + """ + + +class WouldBlock(Exception): + """Raised by ``X_nowait`` functions if ``X()`` would block.""" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_fileio.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_fileio.py new file mode 100644 index 00000000..19c1e834 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_fileio.py @@ -0,0 +1,607 @@ +import os +import pathlib +import sys +from dataclasses import dataclass +from functools import partial +from os import PathLike +from typing import ( + IO, + TYPE_CHECKING, + Any, + AnyStr, + AsyncIterator, + Callable, + Generic, + Iterable, + Iterator, + List, + Optional, + Sequence, + Tuple, + Union, + cast, + overload, +) + +from .. import to_thread +from ..abc import AsyncResource + +if sys.version_info >= (3, 8): + from typing import Final +else: + from typing_extensions import Final + +if TYPE_CHECKING: + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer +else: + ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object + + +class AsyncFile(AsyncResource, Generic[AnyStr]): + """ + An asynchronous file object. + + This class wraps a standard file object and provides async friendly versions of the following + blocking methods (where available on the original file object): + + * read + * read1 + * readline + * readlines + * readinto + * readinto1 + * write + * writelines + * truncate + * seek + * tell + * flush + + All other methods are directly passed through. + + This class supports the asynchronous context manager protocol which closes the underlying file + at the end of the context block. + + This class also supports asynchronous iteration:: + + async with await open_file(...) 
as f: + async for line in f: + print(line) + """ + + def __init__(self, fp: IO[AnyStr]) -> None: + self._fp: Any = fp + + def __getattr__(self, name: str) -> object: + return getattr(self._fp, name) + + @property + def wrapped(self) -> IO[AnyStr]: + """The wrapped file object.""" + return self._fp + + async def __aiter__(self) -> AsyncIterator[AnyStr]: + while True: + line = await self.readline() + if line: + yield line + else: + break + + async def aclose(self) -> None: + return await to_thread.run_sync(self._fp.close) + + async def read(self, size: int = -1) -> AnyStr: + return await to_thread.run_sync(self._fp.read, size) + + async def read1(self: "AsyncFile[bytes]", size: int = -1) -> bytes: + return await to_thread.run_sync(self._fp.read1, size) + + async def readline(self) -> AnyStr: + return await to_thread.run_sync(self._fp.readline) + + async def readlines(self) -> List[AnyStr]: + return await to_thread.run_sync(self._fp.readlines) + + async def readinto(self: "AsyncFile[bytes]", b: WriteableBuffer) -> bytes: + return await to_thread.run_sync(self._fp.readinto, b) + + async def readinto1(self: "AsyncFile[bytes]", b: WriteableBuffer) -> bytes: + return await to_thread.run_sync(self._fp.readinto1, b) + + @overload + async def write(self: "AsyncFile[bytes]", b: ReadableBuffer) -> int: + ... + + @overload + async def write(self: "AsyncFile[str]", b: str) -> int: + ... + + async def write(self, b: Union[ReadableBuffer, str]) -> int: + return await to_thread.run_sync(self._fp.write, b) + + @overload + async def writelines( + self: "AsyncFile[bytes]", lines: Iterable[ReadableBuffer] + ) -> None: + ... + + @overload + async def writelines(self: "AsyncFile[str]", lines: Iterable[str]) -> None: + ... + + async def writelines( + self, lines: Union[Iterable[ReadableBuffer], Iterable[str]] + ) -> None: + return await to_thread.run_sync(self._fp.writelines, lines) + + async def truncate(self, size: Optional[int] = None) -> int: + return await to_thread.run_sync(self._fp.truncate, size) + + async def seek(self, offset: int, whence: Optional[int] = os.SEEK_SET) -> int: + return await to_thread.run_sync(self._fp.seek, offset, whence) + + async def tell(self) -> int: + return await to_thread.run_sync(self._fp.tell) + + async def flush(self) -> None: + return await to_thread.run_sync(self._fp.flush) + + +@overload +async def open_file( + file: Union[str, "PathLike[str]", int], + mode: OpenBinaryMode, + buffering: int = ..., + encoding: Optional[str] = ..., + errors: Optional[str] = ..., + newline: Optional[str] = ..., + closefd: bool = ..., + opener: Optional[Callable[[str, int], int]] = ..., +) -> AsyncFile[bytes]: + ... + + +@overload +async def open_file( + file: Union[str, "PathLike[str]", int], + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: Optional[str] = ..., + errors: Optional[str] = ..., + newline: Optional[str] = ..., + closefd: bool = ..., + opener: Optional[Callable[[str, int], int]] = ..., +) -> AsyncFile[str]: + ... + + +async def open_file( + file: Union[str, "PathLike[str]", int], + mode: str = "r", + buffering: int = -1, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + closefd: bool = True, + opener: Optional[Callable[[str, int], int]] = None, +) -> AsyncFile[Any]: + """ + Open a file asynchronously. + + The arguments are exactly the same as for the builtin :func:`open`. 
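+
+    A minimal usage sketch (``example.txt`` is an assumed file name)::
+
+        async with await open_file("example.txt", "w") as f:
+            await f.write("hello")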
+ + :return: an asynchronous file object + + """ + fp = await to_thread.run_sync( + open, file, mode, buffering, encoding, errors, newline, closefd, opener + ) + return AsyncFile(fp) + + +def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]: + """ + Wrap an existing file as an asynchronous file. + + :param file: an existing file-like object + :return: an asynchronous file object + + """ + return AsyncFile(file) + + +@dataclass(eq=False) +class _PathIterator(AsyncIterator["Path"]): + iterator: Iterator["PathLike[str]"] + + async def __anext__(self) -> "Path": + nextval = await to_thread.run_sync(next, self.iterator, None, cancellable=True) + if nextval is None: + raise StopAsyncIteration from None + + return Path(cast("PathLike[str]", nextval)) + + +class Path: + """ + An asynchronous version of :class:`pathlib.Path`. + + This class cannot be substituted for :class:`pathlib.Path` or :class:`pathlib.PurePath`, but + it is compatible with the :class:`os.PathLike` interface. + + It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for the + deprecated :meth:`~pathlib.Path.link_to` method. + + Any methods that do disk I/O need to be awaited on. These methods are: + + * :meth:`~pathlib.Path.absolute` + * :meth:`~pathlib.Path.chmod` + * :meth:`~pathlib.Path.cwd` + * :meth:`~pathlib.Path.exists` + * :meth:`~pathlib.Path.expanduser` + * :meth:`~pathlib.Path.group` + * :meth:`~pathlib.Path.hardlink_to` + * :meth:`~pathlib.Path.home` + * :meth:`~pathlib.Path.is_block_device` + * :meth:`~pathlib.Path.is_char_device` + * :meth:`~pathlib.Path.is_dir` + * :meth:`~pathlib.Path.is_fifo` + * :meth:`~pathlib.Path.is_file` + * :meth:`~pathlib.Path.is_mount` + * :meth:`~pathlib.Path.lchmod` + * :meth:`~pathlib.Path.lstat` + * :meth:`~pathlib.Path.mkdir` + * :meth:`~pathlib.Path.open` + * :meth:`~pathlib.Path.owner` + * :meth:`~pathlib.Path.read_bytes` + * :meth:`~pathlib.Path.read_text` + * :meth:`~pathlib.Path.readlink` + * :meth:`~pathlib.Path.rename` + * :meth:`~pathlib.Path.replace` + * :meth:`~pathlib.Path.rmdir` + * :meth:`~pathlib.Path.samefile` + * :meth:`~pathlib.Path.stat` + * :meth:`~pathlib.Path.touch` + * :meth:`~pathlib.Path.unlink` + * :meth:`~pathlib.Path.write_bytes` + * :meth:`~pathlib.Path.write_text` + + Additionally, the following methods return an async iterator yielding :class:`~.Path` objects: + + * :meth:`~pathlib.Path.glob` + * :meth:`~pathlib.Path.iterdir` + * :meth:`~pathlib.Path.rglob` + """ + + __slots__ = "_path", "__weakref__" + + __weakref__: Any + + def __init__(self, *args: Union[str, "PathLike[str]"]) -> None: + self._path: Final[pathlib.Path] = pathlib.Path(*args) + + def __fspath__(self) -> str: + return self._path.__fspath__() + + def __str__(self) -> str: + return self._path.__str__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.as_posix()!r})" + + def __bytes__(self) -> bytes: + return self._path.__bytes__() + + def __hash__(self) -> int: + return self._path.__hash__() + + def __eq__(self, other: object) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__eq__(target) + + def __lt__(self, other: "Path") -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__lt__(target) + + def __le__(self, other: "Path") -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__le__(target) + + def __gt__(self, other: "Path") -> bool: + target = other._path if isinstance(other, Path) else other + return 
self._path.__gt__(target) + + def __ge__(self, other: "Path") -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__ge__(target) + + def __truediv__(self, other: Any) -> "Path": + return Path(self._path / other) + + def __rtruediv__(self, other: Any) -> "Path": + return Path(other) / self + + @property + def parts(self) -> Tuple[str, ...]: + return self._path.parts + + @property + def drive(self) -> str: + return self._path.drive + + @property + def root(self) -> str: + return self._path.root + + @property + def anchor(self) -> str: + return self._path.anchor + + @property + def parents(self) -> Sequence["Path"]: + return tuple(Path(p) for p in self._path.parents) + + @property + def parent(self) -> "Path": + return Path(self._path.parent) + + @property + def name(self) -> str: + return self._path.name + + @property + def suffix(self) -> str: + return self._path.suffix + + @property + def suffixes(self) -> List[str]: + return self._path.suffixes + + @property + def stem(self) -> str: + return self._path.stem + + async def absolute(self) -> "Path": + path = await to_thread.run_sync(self._path.absolute) + return Path(path) + + def as_posix(self) -> str: + return self._path.as_posix() + + def as_uri(self) -> str: + return self._path.as_uri() + + def match(self, path_pattern: str) -> bool: + return self._path.match(path_pattern) + + def is_relative_to(self, *other: Union[str, "PathLike[str]"]) -> bool: + try: + self.relative_to(*other) + return True + except ValueError: + return False + + async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: + func = partial(os.chmod, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, mode) + + @classmethod + async def cwd(cls) -> "Path": + path = await to_thread.run_sync(pathlib.Path.cwd) + return cls(path) + + async def exists(self) -> bool: + return await to_thread.run_sync(self._path.exists, cancellable=True) + + async def expanduser(self) -> "Path": + return Path(await to_thread.run_sync(self._path.expanduser, cancellable=True)) + + def glob(self, pattern: str) -> AsyncIterator["Path"]: + gen = self._path.glob(pattern) + return _PathIterator(gen) + + async def group(self) -> str: + return await to_thread.run_sync(self._path.group, cancellable=True) + + async def hardlink_to(self, target: Union[str, pathlib.Path, "Path"]) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(os.link, target, self) + + @classmethod + async def home(cls) -> "Path": + home_path = await to_thread.run_sync(pathlib.Path.home) + return cls(home_path) + + def is_absolute(self) -> bool: + return self._path.is_absolute() + + async def is_block_device(self) -> bool: + return await to_thread.run_sync(self._path.is_block_device, cancellable=True) + + async def is_char_device(self) -> bool: + return await to_thread.run_sync(self._path.is_char_device, cancellable=True) + + async def is_dir(self) -> bool: + return await to_thread.run_sync(self._path.is_dir, cancellable=True) + + async def is_fifo(self) -> bool: + return await to_thread.run_sync(self._path.is_fifo, cancellable=True) + + async def is_file(self) -> bool: + return await to_thread.run_sync(self._path.is_file, cancellable=True) + + async def is_mount(self) -> bool: + return await to_thread.run_sync(os.path.ismount, self._path, cancellable=True) + + def is_reserved(self) -> bool: + return self._path.is_reserved() + + async def is_socket(self) -> bool: + return await 
to_thread.run_sync(self._path.is_socket, cancellable=True) + + async def is_symlink(self) -> bool: + return await to_thread.run_sync(self._path.is_symlink, cancellable=True) + + def iterdir(self) -> AsyncIterator["Path"]: + gen = self._path.iterdir() + return _PathIterator(gen) + + def joinpath(self, *args: Union[str, "PathLike[str]"]) -> "Path": + return Path(self._path.joinpath(*args)) + + async def lchmod(self, mode: int) -> None: + await to_thread.run_sync(self._path.lchmod, mode) + + async def lstat(self) -> os.stat_result: + return await to_thread.run_sync(self._path.lstat, cancellable=True) + + async def mkdir( + self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False + ) -> None: + await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) + + @overload + async def open( + self, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: Optional[str] = ..., + errors: Optional[str] = ..., + newline: Optional[str] = ..., + ) -> AsyncFile[bytes]: + ... + + @overload + async def open( + self, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: Optional[str] = ..., + errors: Optional[str] = ..., + newline: Optional[str] = ..., + ) -> AsyncFile[str]: + ... + + async def open( + self, + mode: str = "r", + buffering: int = -1, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + ) -> AsyncFile[Any]: + fp = await to_thread.run_sync( + self._path.open, mode, buffering, encoding, errors, newline + ) + return AsyncFile(fp) + + async def owner(self) -> str: + return await to_thread.run_sync(self._path.owner, cancellable=True) + + async def read_bytes(self) -> bytes: + return await to_thread.run_sync(self._path.read_bytes) + + async def read_text( + self, encoding: Optional[str] = None, errors: Optional[str] = None + ) -> str: + return await to_thread.run_sync(self._path.read_text, encoding, errors) + + def relative_to(self, *other: Union[str, "PathLike[str]"]) -> "Path": + return Path(self._path.relative_to(*other)) + + async def readlink(self) -> "Path": + target = await to_thread.run_sync(os.readlink, self._path) + return Path(cast(str, target)) + + async def rename(self, target: Union[str, pathlib.PurePath, "Path"]) -> "Path": + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.rename, target) + return Path(target) + + async def replace(self, target: Union[str, pathlib.PurePath, "Path"]) -> "Path": + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.replace, target) + return Path(target) + + async def resolve(self, strict: bool = False) -> "Path": + func = partial(self._path.resolve, strict=strict) + return Path(await to_thread.run_sync(func, cancellable=True)) + + def rglob(self, pattern: str) -> AsyncIterator["Path"]: + gen = self._path.rglob(pattern) + return _PathIterator(gen) + + async def rmdir(self) -> None: + await to_thread.run_sync(self._path.rmdir) + + async def samefile( + self, other_path: Union[str, bytes, int, pathlib.Path, "Path"] + ) -> bool: + if isinstance(other_path, Path): + other_path = other_path._path + + return await to_thread.run_sync( + self._path.samefile, other_path, cancellable=True + ) + + async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: + func = partial(os.stat, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, cancellable=True) + + async def symlink_to( + self, + target: Union[str, pathlib.Path, "Path"], + target_is_directory: bool = 
False, + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) + + async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: + await to_thread.run_sync(self._path.touch, mode, exist_ok) + + async def unlink(self, missing_ok: bool = False) -> None: + try: + await to_thread.run_sync(self._path.unlink) + except FileNotFoundError: + if not missing_ok: + raise + + def with_name(self, name: str) -> "Path": + return Path(self._path.with_name(name)) + + def with_stem(self, stem: str) -> "Path": + return Path(self._path.with_name(stem + self._path.suffix)) + + def with_suffix(self, suffix: str) -> "Path": + return Path(self._path.with_suffix(suffix)) + + async def write_bytes(self, data: bytes) -> int: + return await to_thread.run_sync(self._path.write_bytes, data) + + async def write_text( + self, + data: str, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + ) -> int: + # Path.write_text() does not support the "newline" parameter before Python 3.10 + def sync_write_text() -> int: + with self._path.open( + "w", encoding=encoding, errors=errors, newline=newline + ) as fp: + return fp.write(data) + + return await to_thread.run_sync(sync_write_text) + + +PathLike.register(Path) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_resources.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_resources.py new file mode 100644 index 00000000..b9414f7b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_resources.py @@ -0,0 +1,16 @@ +from ..abc import AsyncResource +from ._tasks import CancelScope + + +async def aclose_forcefully(resource: AsyncResource) -> None: + """ + Close an asynchronous resource in a cancelled scope. + + Doing this closes the resource without waiting on anything. + + :param resource: the resource to close + + """ + with CancelScope() as scope: + scope.cancel() + await resource.aclose() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_signals.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_signals.py new file mode 100644 index 00000000..02234fd2 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_signals.py @@ -0,0 +1,24 @@ +from typing import AsyncIterator + +from ._compat import DeprecatedAsyncContextManager +from ._eventloop import get_asynclib + + +def open_signal_receiver( + *signals: int, +) -> DeprecatedAsyncContextManager[AsyncIterator[int]]: + """ + Start receiving operating system signals. + + :param signals: signals to receive (e.g. ``signal.SIGINT``) + :return: an asynchronous context manager for an asynchronous iterator which yields signal + numbers + + .. warning:: Windows does not support signals natively so it is best to avoid relying on this + in cross-platform applications. + + .. warning:: On asyncio, this permanently replaces any previous signal handler for the given + signals, as set via :meth:`~asyncio.loop.add_signal_handler`. 
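+
+    A brief usage sketch (assumes ``import signal`` and a POSIX platform)::
+
+        async with open_signal_receiver(signal.SIGINT, signal.SIGTERM) as signals:
+            async for signum in signals:
+                print(f"Caught signal {signum}")
+                break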
+ + """ + return get_asynclib().open_signal_receiver(*signals) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_sockets.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_sockets.py new file mode 100644 index 00000000..ca85d30f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_sockets.py @@ -0,0 +1,587 @@ +import socket +import ssl +import sys +from ipaddress import IPv6Address, ip_address +from os import PathLike, chmod +from pathlib import Path +from socket import AddressFamily, SocketKind +from typing import Awaitable, List, Optional, Tuple, Union, cast, overload + +from .. import to_thread +from ..abc import ( + ConnectedUDPSocket, + IPAddressType, + IPSockAddrType, + SocketListener, + SocketStream, + UDPSocket, + UNIXSocketStream, +) +from ..streams.stapled import MultiListener +from ..streams.tls import TLSStream +from ._eventloop import get_asynclib +from ._resources import aclose_forcefully +from ._synchronization import Event +from ._tasks import create_task_group, move_on_after + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + +IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515 + +GetAddrInfoReturnType = List[ + Tuple[AddressFamily, SocketKind, int, str, Tuple[str, int]] +] +AnyIPAddressFamily = Literal[ + AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6 +] +IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6] + + +# tls_hostname given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: Optional[IPAddressType] = ..., + ssl_context: Optional[ssl.SSLContext] = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str, + happy_eyeballs_delay: float = ..., +) -> TLSStream: + ... + + +# ssl_context given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: Optional[IPAddressType] = ..., + ssl_context: ssl.SSLContext, + tls_standard_compatible: bool = ..., + tls_hostname: Optional[str] = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: + ... + + +# tls=True +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: Optional[IPAddressType] = ..., + tls: Literal[True], + ssl_context: Optional[ssl.SSLContext] = ..., + tls_standard_compatible: bool = ..., + tls_hostname: Optional[str] = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: + ... + + +# tls=False +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: Optional[IPAddressType] = ..., + tls: Literal[False], + ssl_context: Optional[ssl.SSLContext] = ..., + tls_standard_compatible: bool = ..., + tls_hostname: Optional[str] = ..., + happy_eyeballs_delay: float = ..., +) -> SocketStream: + ... + + +# No TLS arguments +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: Optional[IPAddressType] = ..., + happy_eyeballs_delay: float = ..., +) -> SocketStream: + ... 
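+
+# (The overloads above exist only so that static type checkers can pick the right
+# return type, TLSStream or SocketStream, from the TLS arguments supplied; they
+# have no effect at run time.)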
+
+
+async def connect_tcp(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    local_host: Optional[IPAddressType] = None,
+    tls: bool = False,
+    ssl_context: Optional[ssl.SSLContext] = None,
+    tls_standard_compatible: bool = True,
+    tls_hostname: Optional[str] = None,
+    happy_eyeballs_delay: float = 0.25,
+) -> Union[SocketStream, TLSStream]:
+    """
+    Connect to a host using the TCP protocol.
+
+    This function implements the stateless version of the Happy Eyeballs algorithm (RFC 6555).
+    If ``remote_host`` is a host name that resolves to multiple IP addresses, each one is tried
+    until one connection attempt succeeds. If the first attempt does not connect within 250
+    milliseconds, a second attempt is started using the next address in the list, and so on.
+    On IPv6 enabled systems, an IPv6 address (if available) is tried first.
+
+    When the connection has been established, a TLS handshake will be done if either
+    ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``.
+
+    :param remote_host: the IP address or host name to connect to
+    :param remote_port: port on the target host to connect to
+    :param local_host: the interface address or name to bind the socket to before connecting
+    :param tls: ``True`` to do a TLS handshake with the connected stream and return a
+        :class:`~anyio.streams.tls.TLSStream` instead
+    :param ssl_context: the SSL context object to use (if omitted, a default context is created)
+    :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake before closing
+        the stream and requires that the server does this as well. Otherwise,
+        :exc:`~ssl.SSLEOFError` may be raised during reads from the stream.
+        Some protocols, such as HTTP, require this option to be ``False``.
+        See :meth:`~ssl.SSLContext.wrap_socket` for details.
+    :param tls_hostname: host name to check the server certificate against (defaults to the value
+        of ``remote_host``)
+    :param happy_eyeballs_delay: delay (in seconds) before starting the next connection attempt
+    :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream
+    :raises OSError: if the connection attempt fails
+
+    """
+    # Placed here due to https://github.com/python/mypy/issues/7057
+    connected_stream: Optional[SocketStream] = None
+
+    async def try_connect(remote_host: str, event: Event) -> None:
+        nonlocal connected_stream
+        try:
+            stream = await asynclib.connect_tcp(remote_host, remote_port, local_address)
+        except OSError as exc:
+            oserrors.append(exc)
+            return
+        else:
+            if connected_stream is None:
+                connected_stream = stream
+                tg.cancel_scope.cancel()
+            else:
+                await stream.aclose()
+        finally:
+            event.set()
+
+    asynclib = get_asynclib()
+    local_address: Optional[IPSockAddrType] = None
+    family = socket.AF_UNSPEC
+    if local_host:
+        gai_res = await getaddrinfo(str(local_host), None)
+        family, *_, local_address = gai_res[0]
+
+    target_host = str(remote_host)
+    try:
+        addr_obj = ip_address(remote_host)
+    except ValueError:
+        # getaddrinfo() will raise an exception if name resolution fails
+        gai_res = await getaddrinfo(
+            target_host, remote_port, family=family, type=socket.SOCK_STREAM
+        )
+
+        # Organize the list so that the first address is an IPv6 address (if available) and the
+        # second one is an IPv4 address. The rest can be in whatever order.
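+        # (Per RFC 6555, IPv6 is tried first; the happy_eyeballs_delay staggering in
+        # the task group below starts the next attempt early when the previous one is
+        # still pending, and the first attempt to succeed cancels all of the others.)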
+ v6_found = v4_found = False + target_addrs: List[Tuple[socket.AddressFamily, str]] = [] + for af, *rest, sa in gai_res: + if af == socket.AF_INET6 and not v6_found: + v6_found = True + target_addrs.insert(0, (af, sa[0])) + elif af == socket.AF_INET and not v4_found and v6_found: + v4_found = True + target_addrs.insert(1, (af, sa[0])) + else: + target_addrs.append((af, sa[0])) + else: + if isinstance(addr_obj, IPv6Address): + target_addrs = [(socket.AF_INET6, addr_obj.compressed)] + else: + target_addrs = [(socket.AF_INET, addr_obj.compressed)] + + oserrors: List[OSError] = [] + async with create_task_group() as tg: + for i, (af, addr) in enumerate(target_addrs): + event = Event() + tg.start_soon(try_connect, addr, event) + with move_on_after(happy_eyeballs_delay): + await event.wait() + + if connected_stream is None: + cause = oserrors[0] if len(oserrors) == 1 else asynclib.ExceptionGroup(oserrors) + raise OSError("All connection attempts failed") from cause + + if tls or tls_hostname or ssl_context: + try: + return await TLSStream.wrap( + connected_stream, + server_side=False, + hostname=tls_hostname or str(remote_host), + ssl_context=ssl_context, + standard_compatible=tls_standard_compatible, + ) + except BaseException: + await aclose_forcefully(connected_stream) + raise + + return connected_stream + + +async def connect_unix(path: Union[str, "PathLike[str]"]) -> UNIXSocketStream: + """ + Connect to the given UNIX socket. + + Not available on Windows. + + :param path: path to the socket + :return: a socket stream object + + """ + path = str(Path(path)) + return await get_asynclib().connect_unix(path) + + +async def create_tcp_listener( + *, + local_host: Optional[IPAddressType] = None, + local_port: int = 0, + family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC, + backlog: int = 65536, + reuse_port: bool = False, +) -> MultiListener[SocketStream]: + """ + Create a TCP socket listener. + + :param local_port: port number to listen on + :param local_host: IP address of the interface to listen on. If omitted, listen on all IPv4 + and IPv6 interfaces. To listen on all interfaces on a specific address family, use + ``0.0.0.0`` for IPv4 or ``::`` for IPv6. + :param family: address family (used if ``interface`` was omitted) + :param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or + 65536) + :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port + (not supported on Windows) + :return: a list of listener objects + + """ + asynclib = get_asynclib() + backlog = min(backlog, 65536) + local_host = str(local_host) if local_host is not None else None + gai_res = await getaddrinfo( + local_host, # type: ignore[arg-type] + local_port, + family=family, + type=socket.SOCK_STREAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + listeners: List[SocketListener] = [] + try: + # The set() is here to work around a glibc bug: + # https://sourceware.org/bugzilla/show_bug.cgi?id=14969 + for fam, *_, sockaddr in sorted(set(gai_res)): + raw_socket = socket.socket(fam) + raw_socket.setblocking(False) + + # For Windows, enable exclusive address use. For others, enable address reuse. 
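+            # (SO_EXCLUSIVEADDRUSE prevents other processes from hijacking the port on
+            # Windows, where SO_REUSEADDR has unsafe rebinding semantics; on POSIX,
+            # SO_REUSEADDR merely allows rebinding a port left in TIME_WAIT.)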
+ if sys.platform == "win32": + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) + else: + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + if reuse_port: + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + # If only IPv6 was requested, disable dual stack operation + if fam == socket.AF_INET6: + raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) + + raw_socket.bind(sockaddr) + raw_socket.listen(backlog) + listener = asynclib.TCPSocketListener(raw_socket) + listeners.append(listener) + except BaseException: + for listener in listeners: + await listener.aclose() + + raise + + return MultiListener(listeners) + + +async def create_unix_listener( + path: Union[str, "PathLike[str]"], + *, + mode: Optional[int] = None, + backlog: int = 65536, +) -> SocketListener: + """ + Create a UNIX socket listener. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or + 65536) + :return: a listener object + + .. versionchanged:: 3.0 + If a socket already exists on the file system in the given path, it will be removed first. + + """ + path_str = str(path) + path = Path(path) + if path.is_socket(): + path.unlink() + + backlog = min(backlog, 65536) + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + try: + await to_thread.run_sync(raw_socket.bind, path_str, cancellable=True) + if mode is not None: + await to_thread.run_sync(chmod, path_str, mode, cancellable=True) + + raw_socket.listen(backlog) + return get_asynclib().UNIXSocketListener(raw_socket) + except BaseException: + raw_socket.close() + raise + + +async def create_udp_socket( + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, + *, + local_host: Optional[IPAddressType] = None, + local_port: int = 0, + reuse_port: bool = False, +) -> UDPSocket: + """ + Create a UDP socket. + + If ``port`` has been given, the socket will be bound to this port on the local machine, + making this socket suitable for providing UDP based services. + + :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from + ``local_host`` if omitted + :param local_host: IP address or host name of the local interface to bind to + :param local_port: local port to bind to + :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port + (not supported on Windows) + :return: a UDP socket + + """ + if family is AddressFamily.AF_UNSPEC and not local_host: + raise ValueError('Either "family" or "local_host" must be given') + + if local_host: + gai_res = await getaddrinfo( + str(local_host), + local_port, + family=family, + type=socket.SOCK_DGRAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + local_address = gai_res[0][-1] + elif family is AddressFamily.AF_INET6: + local_address = ("::", 0) + else: + local_address = ("0.0.0.0", 0) + + return await get_asynclib().create_udp_socket( + family, local_address, None, reuse_port + ) + + +async def create_connected_udp_socket( + remote_host: IPAddressType, + remote_port: int, + *, + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, + local_host: Optional[IPAddressType] = None, + local_port: int = 0, + reuse_port: bool = False, +) -> ConnectedUDPSocket: + """ + Create a connected UDP socket. 
+
+    Connected UDP sockets can only communicate with the specified remote host/port, and any
+    packets sent from other sources are dropped.
+
+    :param remote_host: remote host to set as the default target
+    :param remote_port: port on the remote host to set as the default target
+    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from
+        ``local_host`` or ``remote_host`` if omitted
+    :param local_host: IP address or host name of the local interface to bind to
+    :param local_port: local port to bind to
+    :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
+        (not supported on Windows)
+    :return: a connected UDP socket
+
+    """
+    local_address = None
+    if local_host:
+        gai_res = await getaddrinfo(
+            str(local_host),
+            local_port,
+            family=family,
+            type=socket.SOCK_DGRAM,
+            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+        )
+        family = cast(AnyIPAddressFamily, gai_res[0][0])
+        local_address = gai_res[0][-1]
+
+    gai_res = await getaddrinfo(
+        str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM
+    )
+    family = cast(AnyIPAddressFamily, gai_res[0][0])
+    remote_address = gai_res[0][-1]
+
+    return await get_asynclib().create_udp_socket(
+        family, local_address, remote_address, reuse_port
+    )
+
+
+async def getaddrinfo(
+    host: Union[bytearray, bytes, str],
+    port: Union[str, int, None],
+    *,
+    family: Union[int, AddressFamily] = 0,
+    type: Union[int, SocketKind] = 0,
+    proto: int = 0,
+    flags: int = 0,
+) -> GetAddrInfoReturnType:
+    """
+    Look up a numeric IP address given a host name.
+
+    Internationalized domain names are translated according to the (non-transitional) IDNA 2008
+    standard.
+
+    .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
+        (host, port), unlike what :func:`socket.getaddrinfo` does.
+
+    :param host: host name
+    :param port: port number
+    :param family: socket family (``AF_INET``, ...)
+    :param type: socket type (``SOCK_STREAM``, ...)
+    :param proto: protocol number
+    :param flags: flags to pass to upstream ``getaddrinfo()``
+    :return: list of tuples containing (family, type, proto, canonname, sockaddr)
+
+    .. seealso:: :func:`socket.getaddrinfo`
+
+    """
+    # Handle unicode hostnames
+    if isinstance(host, str):
+        try:
+            encoded_host = host.encode("ascii")
+        except UnicodeEncodeError:
+            import idna
+
+            encoded_host = idna.encode(host, uts46=True)
+    else:
+        encoded_host = host
+
+    gai_res = await get_asynclib().getaddrinfo(
+        encoded_host, port, family=family, type=type, proto=proto, flags=flags
+    )
+    return [
+        (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
+        for family, type, proto, canonname, sockaddr in gai_res
+    ]
+
+
+def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[Tuple[str, str]]:
+    """
+    Look up the host name of an IP address.
+
+    :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4)
+    :param flags: flags to pass to upstream ``getnameinfo()``
+    :return: a tuple of (host name, service name)
+
+    .. seealso:: :func:`socket.getnameinfo`
+
+    """
+    return get_asynclib().getnameinfo(sockaddr, flags)
+
+
+def wait_socket_readable(sock: socket.socket) -> Awaitable[None]:
+    """
+    Wait until the given socket has data to be read.
+
+    This does **NOT** work on Windows when using the asyncio backend with a proactor event loop
+    (default on py3.8+).
+
+    .. warning:: Only use this on raw sockets that have not been wrapped by any higher level
+        constructs like socket streams!
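+
+    A rough sketch (``sock`` is an assumed connected, non-blocking socket)::
+
+        await wait_socket_readable(sock)
+        data = sock.recv(4096)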
+ + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become readable + + """ + return get_asynclib().wait_socket_readable(sock) + + +def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: + """ + Wait until the given socket can be written to. + + This does **NOT** work on Windows when using the asyncio backend with a proactor event loop + (default on py3.8+). + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher level + constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become writable + + """ + return get_asynclib().wait_socket_writable(sock) + + +# +# Private API +# + + +def convert_ipv6_sockaddr( + sockaddr: Union[Tuple[str, int, int, int], Tuple[str, int]] +) -> Tuple[str, int]: + """ + Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. + + If the scope ID is nonzero, it is added to the address, separated with ``%``. + Otherwise the flow id and scope id are simply cut off from the tuple. + Any other kinds of socket addresses are returned as-is. + + :param sockaddr: the result of :meth:`~socket.socket.getsockname` + :return: the converted socket address + + """ + # This is more complicated than it should be because of MyPy + if isinstance(sockaddr, tuple) and len(sockaddr) == 4: + host, port, flowinfo, scope_id = cast(Tuple[str, int, int, int], sockaddr) + if scope_id: + # Add scope_id to the address + return f"{host}%{scope_id}", port + else: + return host, port + else: + return cast(Tuple[str, int], sockaddr) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_streams.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_streams.py new file mode 100644 index 00000000..58954a68 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_streams.py @@ -0,0 +1,45 @@ +import math +from typing import Any, Optional, Tuple, Type, TypeVar, overload + +from ..streams.memory import ( + MemoryObjectReceiveStream, + MemoryObjectSendStream, + MemoryObjectStreamState, +) + +T_Item = TypeVar("T_Item") + + +@overload +def create_memory_object_stream( + max_buffer_size: float, item_type: Type[T_Item] +) -> Tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]: + ... + + +@overload +def create_memory_object_stream( + max_buffer_size: float = 0, +) -> Tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]: + ... + + +def create_memory_object_stream( + max_buffer_size: float = 0, item_type: Optional[Type[T_Item]] = None +) -> Tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]: + """ + Create a memory object stream. 
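+
+    A minimal usage sketch::
+
+        send_stream, receive_stream = create_memory_object_stream(max_buffer_size=1)
+        await send_stream.send("hello")
+        assert await receive_stream.receive() == "hello"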
+ + :param max_buffer_size: number of items held in the buffer until ``send()`` starts blocking + :param item_type: type of item, for marking the streams with the right generic type for + static typing (not used at run time) + :return: a tuple of (send stream, receive stream) + + """ + if max_buffer_size != math.inf and not isinstance(max_buffer_size, int): + raise ValueError("max_buffer_size must be either an integer or math.inf") + if max_buffer_size < 0: + raise ValueError("max_buffer_size cannot be negative") + + state: MemoryObjectStreamState = MemoryObjectStreamState(max_buffer_size) + return MemoryObjectSendStream(state), MemoryObjectReceiveStream(state) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_subprocesses.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_subprocesses.py new file mode 100644 index 00000000..43fa6b6f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_subprocesses.py @@ -0,0 +1,136 @@ +from io import BytesIO +from os import PathLike +from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess +from typing import ( + IO, + Any, + AsyncIterable, + List, + Mapping, + Optional, + Sequence, + Union, + cast, +) + +from ..abc import Process +from ._eventloop import get_asynclib +from ._tasks import create_task_group + + +async def run_process( + command: Union[str, bytes, Sequence[Union[str, bytes]]], + *, + input: Optional[bytes] = None, + stdout: Union[int, IO[Any], None] = PIPE, + stderr: Union[int, IO[Any], None] = PIPE, + check: bool = True, + cwd: Union[str, bytes, "PathLike[str]", None] = None, + env: Optional[Mapping[str, str]] = None, + start_new_session: bool = False, +) -> "CompletedProcess[bytes]": + """ + Run an external command in a subprocess and wait until it completes. + + .. seealso:: :func:`subprocess.run` + + :param command: either a string to pass to the shell, or an iterable of strings containing the + executable name or path and its arguments + :param input: bytes passed to the standard input of the subprocess + :param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or + :data:`subprocess.STDOUT` + :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the process + terminates with a return code other than 0 + :param cwd: If not ``None``, change the working directory to this before running the command + :param env: if not ``None``, this mapping replaces the inherited environment variables from the + parent process + :param start_new_session: if ``true`` the setsid() system call will be made in the child + process prior to the execution of the subprocess. 
(POSIX only) + :return: an object representing the completed process + :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process exits with a + nonzero return code + + """ + + async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None: + buffer = BytesIO() + async for chunk in stream: + buffer.write(chunk) + + stream_contents[index] = buffer.getvalue() + + async with await open_process( + command, + stdin=PIPE if input else DEVNULL, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) as process: + stream_contents: List[Optional[bytes]] = [None, None] + try: + async with create_task_group() as tg: + if process.stdout: + tg.start_soon(drain_stream, process.stdout, 0) + if process.stderr: + tg.start_soon(drain_stream, process.stderr, 1) + if process.stdin and input: + await process.stdin.send(input) + await process.stdin.aclose() + + await process.wait() + except BaseException: + process.kill() + raise + + output, errors = stream_contents + if check and process.returncode != 0: + raise CalledProcessError(cast(int, process.returncode), command, output, errors) + + return CompletedProcess(command, cast(int, process.returncode), output, errors) + + +async def open_process( + command: Union[str, bytes, Sequence[Union[str, bytes]]], + *, + stdin: Union[int, IO[Any], None] = PIPE, + stdout: Union[int, IO[Any], None] = PIPE, + stderr: Union[int, IO[Any], None] = PIPE, + cwd: Union[str, bytes, "PathLike[str]", None] = None, + env: Optional[Mapping[str, str]] = None, + start_new_session: bool = False, +) -> Process: + """ + Start an external command in a subprocess. + + .. seealso:: :class:`subprocess.Popen` + + :param command: either a string to pass to the shell, or an iterable of strings containing the + executable name or path and its arguments + :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a + file-like object, or ``None`` + :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + a file-like object, or ``None`` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + :data:`subprocess.STDOUT`, a file-like object, or ``None`` + :param cwd: If not ``None``, the working directory is changed before executing + :param env: If env is not ``None``, it must be a mapping that defines the environment + variables for the new process + :param start_new_session: if ``true`` the setsid() system call will be made in the child + process prior to the execution of the subprocess. 
(POSIX only) + :return: an asynchronous process object + + """ + shell = isinstance(command, str) + return await get_asynclib().open_process( + command, + shell=shell, + stdin=stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + start_new_session=start_new_session, + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_synchronization.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_synchronization.py new file mode 100644 index 00000000..15d4afcd --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_synchronization.py @@ -0,0 +1,595 @@ +from collections import deque +from dataclasses import dataclass +from types import TracebackType +from typing import Deque, Optional, Tuple, Type +from warnings import warn + +from ..lowlevel import cancel_shielded_checkpoint, checkpoint, checkpoint_if_cancelled +from ._compat import DeprecatedAwaitable +from ._eventloop import get_asynclib +from ._exceptions import BusyResourceError, WouldBlock +from ._tasks import CancelScope +from ._testing import TaskInfo, get_current_task + + +@dataclass(frozen=True) +class EventStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait` + """ + + tasks_waiting: int + + +@dataclass(frozen=True) +class CapacityLimiterStatistics: + """ + :ivar int borrowed_tokens: number of tokens currently borrowed by tasks + :ivar float total_tokens: total number of available tokens + :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from this + limiter + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.CapacityLimiter.acquire` or + :meth:`~.CapacityLimiter.acquire_on_behalf_of` + """ + + borrowed_tokens: int + total_tokens: float + borrowers: Tuple[object, ...] + tasks_waiting: int + + +@dataclass(frozen=True) +class LockStatistics: + """ + :ivar bool locked: flag indicating if this lock is locked or not + :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the lock is not + held by any task) + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` + """ + + locked: bool + owner: Optional[TaskInfo] + tasks_waiting: int + + +@dataclass(frozen=True) +class ConditionStatistics: + """ + :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` + :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying :class:`~.Lock` + """ + + tasks_waiting: int + lock_statistics: LockStatistics + + +@dataclass(frozen=True) +class SemaphoreStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire` + + """ + + tasks_waiting: int + + +class Event: + def __new__(cls) -> "Event": + return get_asynclib().Event() + + def set(self) -> DeprecatedAwaitable: + """Set the flag, notifying all listeners.""" + raise NotImplementedError + + def is_set(self) -> bool: + """Return ``True`` if the flag is set, ``False`` if not.""" + raise NotImplementedError + + async def wait(self) -> None: + """ + Wait until the flag has been set. + + If the flag has already been set when this method is called, it returns immediately. 
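+
+        A minimal sketch of the usual pattern::
+
+            event = anyio.Event()
+            # ...some other task calls event.set() once the condition holds...
+            await event.wait()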
+ + """ + raise NotImplementedError + + def statistics(self) -> EventStatistics: + """Return statistics about the current state of this event.""" + raise NotImplementedError + + +class Lock: + _owner_task: Optional[TaskInfo] = None + + def __init__(self) -> None: + self._waiters: Deque[Tuple[TaskInfo, Event]] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + await checkpoint_if_cancelled() + try: + self.acquire_nowait() + except WouldBlock: + task = get_current_task() + event = Event() + token = task, event + self._waiters.append(token) + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(token) + elif self._owner_task == task: + self.release() + + raise + + assert self._owner_task == task + else: + try: + await cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. + + :raises ~WouldBlock: if the operation would block + + """ + task = get_current_task() + if self._owner_task == task: + raise RuntimeError("Attempted to acquire an already held Lock") + + if self._owner_task is not None: + raise WouldBlock + + self._owner_task = task + + def release(self) -> DeprecatedAwaitable: + """Release the lock.""" + if self._owner_task != get_current_task(): + raise RuntimeError("The current task is not holding this lock") + + if self._waiters: + self._owner_task, event = self._waiters.popleft() + event.set() + else: + del self._owner_task + + return DeprecatedAwaitable(self.release) + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + return self._owner_task is not None + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. versionadded:: 3.0 + """ + return LockStatistics(self.locked(), self._owner_task, len(self._waiters)) + + +class Condition: + _owner_task: Optional[TaskInfo] = None + + def __init__(self, lock: Optional[Lock] = None): + self._lock = lock or Lock() + self._waiters: Deque[Event] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.release() + + def _check_acquired(self) -> None: + if self._owner_task != get_current_task(): + raise RuntimeError("The current task is not holding the underlying lock") + + async def acquire(self) -> None: + """Acquire the underlying lock.""" + await self._lock.acquire() + self._owner_task = get_current_task() + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. 
+ + :raises ~WouldBlock: if the operation would block + + """ + self._lock.acquire_nowait() + self._owner_task = get_current_task() + + def release(self) -> DeprecatedAwaitable: + """Release the underlying lock.""" + self._lock.release() + return DeprecatedAwaitable(self.release) + + def locked(self) -> bool: + """Return True if the lock is set.""" + return self._lock.locked() + + def notify(self, n: int = 1) -> None: + """Notify exactly n listeners.""" + self._check_acquired() + for _ in range(n): + try: + event = self._waiters.popleft() + except IndexError: + break + + event.set() + + def notify_all(self) -> None: + """Notify all the listeners.""" + self._check_acquired() + for event in self._waiters: + event.set() + + self._waiters.clear() + + async def wait(self) -> None: + """Wait for a notification.""" + await checkpoint() + event = Event() + self._waiters.append(event) + self.release() + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(event) + + raise + finally: + with CancelScope(shield=True): + await self.acquire() + + def statistics(self) -> ConditionStatistics: + """ + Return statistics about the current state of this condition. + + .. versionadded:: 3.0 + """ + return ConditionStatistics(len(self._waiters), self._lock.statistics()) + + +class Semaphore: + def __init__(self, initial_value: int, *, max_value: Optional[int] = None): + if not isinstance(initial_value, int): + raise TypeError("initial_value must be an integer") + if initial_value < 0: + raise ValueError("initial_value must be >= 0") + if max_value is not None: + if not isinstance(max_value, int): + raise TypeError("max_value must be an integer or None") + if max_value < initial_value: + raise ValueError( + "max_value must be equal to or higher than initial_value" + ) + + self._value = initial_value + self._max_value = max_value + self._waiters: Deque[Event] = deque() + + async def __aenter__(self) -> "Semaphore": + await self.acquire() + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.release() + + async def acquire(self) -> None: + """Decrement the semaphore value, blocking if necessary.""" + await checkpoint_if_cancelled() + try: + self.acquire_nowait() + except WouldBlock: + event = Event() + self._waiters.append(event) + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(event) + else: + self.release() + + raise + else: + try: + await cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. 
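+
+        (For a semaphore this means decrementing the value if it is nonzero.)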
+ + :raises ~WouldBlock: if the operation would block + + """ + if self._value == 0: + raise WouldBlock + + self._value -= 1 + + def release(self) -> DeprecatedAwaitable: + """Increment the semaphore value.""" + if self._max_value is not None and self._value == self._max_value: + raise ValueError("semaphore released too many times") + + if self._waiters: + self._waiters.popleft().set() + else: + self._value += 1 + + return DeprecatedAwaitable(self.release) + + @property + def value(self) -> int: + """The current value of the semaphore.""" + return self._value + + @property + def max_value(self) -> Optional[int]: + """The maximum value of the semaphore.""" + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + """ + Return statistics about the current state of this semaphore. + + .. versionadded:: 3.0 + """ + return SemaphoreStatistics(len(self._waiters)) + + +class CapacityLimiter: + def __new__(cls, total_tokens: float) -> "CapacityLimiter": + return get_asynclib().CapacityLimiter(total_tokens) + + async def __aenter__(self) -> None: + raise NotImplementedError + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + raise NotImplementedError + + @property + def total_tokens(self) -> float: + """ + The total number of tokens available for borrowing. + + This is a read-write property. If the total number of tokens is increased, the + proportionate number of tasks waiting on this limiter will be granted their tokens. + + .. versionchanged:: 3.0 + The property is now writable. + + """ + raise NotImplementedError + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + raise NotImplementedError + + async def set_total_tokens(self, value: float) -> None: + warn( + "CapacityLimiter.set_total_tokens has been deprecated. Set the value of the" + '"total_tokens" attribute directly.', + DeprecationWarning, + ) + self.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + """The number of tokens that have currently been borrowed.""" + raise NotImplementedError + + @property + def available_tokens(self) -> float: + """The number of tokens currently available to be borrowed""" + raise NotImplementedError + + def acquire_nowait(self) -> DeprecatedAwaitable: + """ + Acquire a token for the current task without waiting for one to become available. + + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: + """ + Acquire a token without waiting for one to become available. + + :param borrower: the entity borrowing a token + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + async def acquire(self) -> None: + """ + Acquire a token for the current task, waiting if necessary for one to become available. + + """ + raise NotImplementedError + + async def acquire_on_behalf_of(self, borrower: object) -> None: + """ + Acquire a token, waiting if necessary for one to become available. + + :param borrower: the entity borrowing a token + + """ + raise NotImplementedError + + def release(self) -> None: + """ + Release the token held by the current task. + :raises RuntimeError: if the current task has not borrowed a token from this limiter. 
+ + """ + raise NotImplementedError + + def release_on_behalf_of(self, borrower: object) -> None: + """ + Release the token held by the given borrower. + + :raises RuntimeError: if the borrower has not borrowed a token from this limiter. + + """ + raise NotImplementedError + + def statistics(self) -> CapacityLimiterStatistics: + """ + Return statistics about the current state of this limiter. + + .. versionadded:: 3.0 + + """ + raise NotImplementedError + + +def create_lock() -> Lock: + """ + Create an asynchronous lock. + + :return: a lock object + + .. deprecated:: 3.0 + Use :class:`~Lock` directly. + + """ + warn("create_lock() is deprecated -- use Lock() directly", DeprecationWarning) + return Lock() + + +def create_condition(lock: Optional[Lock] = None) -> Condition: + """ + Create an asynchronous condition. + + :param lock: the lock to base the condition object on + :return: a condition object + + .. deprecated:: 3.0 + Use :class:`~Condition` directly. + + """ + warn( + "create_condition() is deprecated -- use Condition() directly", + DeprecationWarning, + ) + return Condition(lock=lock) + + +def create_event() -> Event: + """ + Create an asynchronous event object. + + :return: an event object + + .. deprecated:: 3.0 + Use :class:`~Event` directly. + + """ + warn("create_event() is deprecated -- use Event() directly", DeprecationWarning) + return get_asynclib().Event() + + +def create_semaphore(value: int, *, max_value: Optional[int] = None) -> Semaphore: + """ + Create an asynchronous semaphore. + + :param value: the semaphore's initial value + :param max_value: if set, makes this a "bounded" semaphore that raises :exc:`ValueError` if the + semaphore's value would exceed this number + :return: a semaphore object + + .. deprecated:: 3.0 + Use :class:`~Semaphore` directly. + + """ + warn( + "create_semaphore() is deprecated -- use Semaphore() directly", + DeprecationWarning, + ) + return Semaphore(value, max_value=max_value) + + +def create_capacity_limiter(total_tokens: float) -> CapacityLimiter: + """ + Create a capacity limiter. + + :param total_tokens: the total number of tokens available for borrowing (can be an integer or + :data:`math.inf`) + :return: a capacity limiter object + + .. deprecated:: 3.0 + Use :class:`~CapacityLimiter` directly. 
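Usage sketch for the CapacityLimiter facade above (illustrative; the URLs are placeholders): a token is borrowed on entry and returned on exit, capping concurrent work at total_tokens.

import anyio


async def fetch(url: str, limiter: anyio.CapacityLimiter) -> None:
    async with limiter:  # borrows a token for the current task
        print(f"fetching {url} (borrowed={limiter.borrowed_tokens})")
        await anyio.sleep(0.1)


async def main() -> None:
    limiter = anyio.CapacityLimiter(3)  # math.inf is also accepted
    async with anyio.create_task_group() as tg:
        for n in range(10):
            tg.start_soon(fetch, f"https://example.invalid/{n}", limiter)


anyio.run(main)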
+ + """ + warn( + "create_capacity_limiter() is deprecated -- use CapacityLimiter() directly", + DeprecationWarning, + ) + return get_asynclib().CapacityLimiter(total_tokens) + + +class ResourceGuard: + __slots__ = "action", "_guarded" + + def __init__(self, action: str): + self.action = action + self._guarded = False + + def __enter__(self) -> None: + if self._guarded: + raise BusyResourceError(self.action) + + self._guarded = True + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + self._guarded = False + return None diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_tasks.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_tasks.py new file mode 100644 index 00000000..f24764cf --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_tasks.py @@ -0,0 +1,178 @@ +import math +from types import TracebackType +from typing import Optional, Type +from warnings import warn + +from ..abc._tasks import TaskGroup, TaskStatus +from ._compat import ( + DeprecatedAsyncContextManager, + DeprecatedAwaitable, + DeprecatedAwaitableFloat, +) +from ._eventloop import get_asynclib + + +class _IgnoredTaskStatus(TaskStatus): + def started(self, value: object = None) -> None: + pass + + +TASK_STATUS_IGNORED = _IgnoredTaskStatus() + + +class CancelScope(DeprecatedAsyncContextManager["CancelScope"]): + """ + Wraps a unit of work that can be made separately cancellable. + + :param deadline: The time (clock value) when this scope is cancelled automatically + :param shield: ``True`` to shield the cancel scope from external cancellation + """ + + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> "CancelScope": + return get_asynclib().CancelScope(shield=shield, deadline=deadline) + + def cancel(self) -> DeprecatedAwaitable: + """Cancel this scope immediately.""" + raise NotImplementedError + + @property + def deadline(self) -> float: + """ + The time (clock value) when this scope is cancelled automatically. + + Will be ``float('inf')`` if no timeout has been set. + + """ + raise NotImplementedError + + @deadline.setter + def deadline(self, value: float) -> None: + raise NotImplementedError + + @property + def cancel_called(self) -> bool: + """``True`` if :meth:`cancel` has been called.""" + raise NotImplementedError + + @property + def shield(self) -> bool: + """ + ``True`` if this scope is shielded from external cancellation. + + While a scope is shielded, it will not receive cancellations from outside. + + """ + raise NotImplementedError + + @shield.setter + def shield(self, value: bool) -> None: + raise NotImplementedError + + def __enter__(self) -> "CancelScope": + raise NotImplementedError + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + raise NotImplementedError + + +def open_cancel_scope(*, shield: bool = False) -> CancelScope: + """ + Open a cancel scope. + + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a cancel scope + + .. deprecated:: 3.0 + Use :class:`~CancelScope` directly. 
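A small sketch of the CancelScope facade above (not part of the patch): cancelling the scope aborts the sleep at the next checkpoint, and execution resumes after the with block.

import anyio


async def main() -> None:
    with anyio.CancelScope() as scope:
        scope.cancel()         # marks the scope cancelled
        await anyio.sleep(10)  # raises a cancellation, absorbed by the scope
    print("cancel_called =", scope.cancel_called)  # True


anyio.run(main)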
+
+ """
+ warn(
+ "open_cancel_scope() is deprecated -- use CancelScope() directly",
+ DeprecationWarning,
+ )
+ return get_asynclib().CancelScope(shield=shield)
+
+
+class FailAfterContextManager(DeprecatedAsyncContextManager[CancelScope]):
+ def __init__(self, cancel_scope: CancelScope):
+ self._cancel_scope = cancel_scope
+
+ def __enter__(self) -> CancelScope:
+ return self._cancel_scope.__enter__()
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> Optional[bool]:
+ retval = self._cancel_scope.__exit__(exc_type, exc_val, exc_tb)
+ if self._cancel_scope.cancel_called:
+ raise TimeoutError
+
+ return retval
+
+
+def fail_after(delay: Optional[float], shield: bool = False) -> FailAfterContextManager:
+ """
+ Create a context manager which raises a :class:`TimeoutError` if it does not finish in time.
+
+ :param delay: maximum allowed time (in seconds) before raising the exception, or ``None`` to
+ disable the timeout
+ :param shield: ``True`` to shield the cancel scope from external cancellation
+ :return: a context manager that yields a cancel scope
+ :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.abc.CancelScope`\\]
+
+ """
+ deadline = (
+ (get_asynclib().current_time() + delay) if delay is not None else math.inf
+ )
+ cancel_scope = get_asynclib().CancelScope(deadline=deadline, shield=shield)
+ return FailAfterContextManager(cancel_scope)
+
+
+def move_on_after(delay: Optional[float], shield: bool = False) -> CancelScope:
+ """
+ Create a cancel scope with a deadline that expires after the given delay.
+
+ :param delay: maximum allowed time (in seconds) before exiting the context block, or ``None``
+ to disable the timeout
+ :param shield: ``True`` to shield the cancel scope from external cancellation
+ :return: a cancel scope
+
+ """
+ deadline = (
+ (get_asynclib().current_time() + delay) if delay is not None else math.inf
+ )
+ return get_asynclib().CancelScope(deadline=deadline, shield=shield)
+
+
+def current_effective_deadline() -> DeprecatedAwaitableFloat:
+ """
+ Return the nearest deadline among all the cancel scopes effective for the current task.
+
+ :return: a clock value from the event loop's internal clock (``float('inf')`` if there is no
+ deadline in effect)
+ :rtype: float
+
+ """
+ return DeprecatedAwaitableFloat(
+ get_asynclib().current_effective_deadline(), current_effective_deadline
+ )
+
+
+def create_task_group() -> "TaskGroup":
+ """
+ Create a task group.
+
+ :return: a task group
+
+ """
+ return get_asynclib().TaskGroup() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_testing.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_testing.py new file mode 100644 index 00000000..4998753a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_testing.py @@ -0,0 +1,80 @@ +from typing import Any, Awaitable, Generator, Optional, Union
+
+from ._compat import DeprecatedAwaitableList, _warn_deprecation
+from ._eventloop import get_asynclib
+
+
+class TaskInfo:
+ """
+ Represents an asynchronous task.
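The two timeout helpers defined above differ only in how expiry is reported; a quick sketch (illustrative, not part of the patch):

import anyio


async def main() -> None:
    try:
        with anyio.fail_after(0.5):  # raises TimeoutError on expiry
            await anyio.sleep(10)
    except TimeoutError:
        print("fail_after: timed out")

    with anyio.move_on_after(0.5) as scope:  # exits the block silently
        await anyio.sleep(10)
    if scope.cancel_called:
        print("move_on_after: deadline expired")


anyio.run(main)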
+ + :ivar int id: the unique identifier of the task + :ivar parent_id: the identifier of the parent task, if any + :vartype parent_id: Optional[int] + :ivar str name: the description of the task (if any) + :ivar ~collections.abc.Coroutine coro: the coroutine object of the task + """ + + __slots__ = "_name", "id", "parent_id", "name", "coro" + + def __init__( + self, + id: int, + parent_id: Optional[int], + name: Optional[str], + coro: Union[Generator, Awaitable[Any]], + ): + func = get_current_task + self._name = f"{func.__module__}.{func.__qualname__}" + self.id: int = id + self.parent_id: Optional[int] = parent_id + self.name: Optional[str] = name + self.coro: Union[Generator, Awaitable[Any]] = coro + + def __eq__(self, other: object) -> bool: + if isinstance(other, TaskInfo): + return self.id == other.id + + return NotImplemented + + def __hash__(self) -> int: + return hash(self.id) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})" + + def __await__(self) -> Generator[None, None, "TaskInfo"]: + _warn_deprecation(self) + if False: + yield + + return self + + def _unwrap(self) -> "TaskInfo": + return self + + +def get_current_task() -> TaskInfo: + """ + Return the current task. + + :return: a representation of the current task + + """ + return get_asynclib().get_current_task() + + +def get_running_tasks() -> DeprecatedAwaitableList[TaskInfo]: + """ + Return a list of running tasks in the current event loop. + + :return: a list of task info objects + + """ + tasks = get_asynclib().get_running_tasks() + return DeprecatedAwaitableList(tasks, func=get_running_tasks) + + +async def wait_all_tasks_blocked() -> None: + """Wait until all other tasks are waiting for something.""" + await get_asynclib().wait_all_tasks_blocked() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_typedattr.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_typedattr.py new file mode 100644 index 00000000..424836a1 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/_core/_typedattr.py @@ -0,0 +1,81 @@ +import sys +from typing import Any, Callable, Dict, Mapping, TypeVar, Union, overload + +from ._exceptions import TypedAttributeLookupError + +if sys.version_info >= (3, 8): + from typing import final +else: + from typing_extensions import final + +T_Attr = TypeVar("T_Attr") +T_Default = TypeVar("T_Default") +undefined = object() + + +def typed_attribute() -> Any: + """Return a unique object, used to mark typed attributes.""" + return object() + + +class TypedAttributeSet: + """ + Superclass for typed attribute collections. + + Checks that every public attribute of every subclass has a type annotation. + """ + + def __init_subclass__(cls) -> None: + annotations: Dict[str, Any] = getattr(cls, "__annotations__", {}) + for attrname in dir(cls): + if not attrname.startswith("_") and attrname not in annotations: + raise TypeError( + f"Attribute {attrname!r} is missing its type annotation" + ) + + super().__init_subclass__() + + +class TypedAttributeProvider: + """Base class for classes that wish to provide typed extra attributes.""" + + @property + def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: + """ + A mapping of the extra attributes to callables that return the corresponding values. 
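The introspection helpers above are exported at the anyio top level; a brief sketch (not part of the patch):

import anyio
from anyio import get_current_task, get_running_tasks


async def main() -> None:
    task = get_current_task()
    print(f"current task: id={task.id} name={task.name!r}")
    print("running tasks:", len(get_running_tasks()))


anyio.run(main)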
+ + If the provider wraps another provider, the attributes from that wrapper should also be + included in the returned mapping (but the wrapper may override the callables from the + wrapped instance). + + """ + return {} + + @overload + def extra(self, attribute: T_Attr) -> T_Attr: + ... + + @overload + def extra(self, attribute: T_Attr, default: T_Default) -> Union[T_Attr, T_Default]: + ... + + @final + def extra(self, attribute: Any, default: object = undefined) -> object: + """ + extra(attribute, default=undefined) + + Return the value of the given typed extra attribute. + + :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to look for + :param default: the value that should be returned if no value is found for the attribute + :raises ~anyio.TypedAttributeLookupError: if the search failed and no default value was + given + + """ + try: + return self.extra_attributes[attribute]() + except KeyError: + if default is undefined: + raise TypedAttributeLookupError("Attribute not found") from None + else: + return default diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/__init__.py new file mode 100644 index 00000000..72c44449 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/__init__.py @@ -0,0 +1,88 @@ +__all__ = ( + "AsyncResource", + "IPAddressType", + "IPSockAddrType", + "SocketAttribute", + "SocketStream", + "SocketListener", + "UDPSocket", + "UNIXSocketStream", + "UDPPacketType", + "ConnectedUDPSocket", + "UnreliableObjectReceiveStream", + "UnreliableObjectSendStream", + "UnreliableObjectStream", + "ObjectReceiveStream", + "ObjectSendStream", + "ObjectStream", + "ByteReceiveStream", + "ByteSendStream", + "ByteStream", + "AnyUnreliableByteReceiveStream", + "AnyUnreliableByteSendStream", + "AnyUnreliableByteStream", + "AnyByteReceiveStream", + "AnyByteSendStream", + "AnyByteStream", + "Listener", + "Process", + "Event", + "Condition", + "Lock", + "Semaphore", + "CapacityLimiter", + "CancelScope", + "TaskGroup", + "TaskStatus", + "TestRunner", + "BlockingPortal", +) + +from typing import Any + +from ._resources import AsyncResource +from ._sockets import ( + ConnectedUDPSocket, + IPAddressType, + IPSockAddrType, + SocketAttribute, + SocketListener, + SocketStream, + UDPPacketType, + UDPSocket, + UNIXSocketStream, +) +from ._streams import ( + AnyByteReceiveStream, + AnyByteSendStream, + AnyByteStream, + AnyUnreliableByteReceiveStream, + AnyUnreliableByteSendStream, + AnyUnreliableByteStream, + ByteReceiveStream, + ByteSendStream, + ByteStream, + Listener, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, + UnreliableObjectReceiveStream, + UnreliableObjectSendStream, + UnreliableObjectStream, +) +from ._subprocesses import Process +from ._tasks import TaskGroup, TaskStatus +from ._testing import TestRunner + +# Re-exported here, for backwards compatibility +# isort: off +from .._core._synchronization import CapacityLimiter, Condition, Event, Lock, Semaphore +from .._core._tasks import CancelScope +from ..from_thread import BlockingPortal + +# Re-export imports so they look like they live directly in this package +key: str +value: Any +for key, value in list(locals().items()): + if getattr(value, "__module__", "").startswith("anyio.abc."): + value.__module__ = __name__ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_resources.py 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_resources.py new file mode 100644 index 00000000..4f66c38e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_resources.py @@ -0,0 +1,29 @@ +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import Optional, Type, TypeVar + +T = TypeVar("T") + + +class AsyncResource(metaclass=ABCMeta): + """ + Abstract base class for all closeable asynchronous resources. + + Works as an asynchronous context manager which returns the instance itself on enter, and calls + :meth:`aclose` on exit. + """ + + async def __aenter__(self: T) -> T: + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + await self.aclose() + + @abstractmethod + async def aclose(self) -> None: + """Close the resource.""" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_sockets.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_sockets.py new file mode 100644 index 00000000..f73e795e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_sockets.py @@ -0,0 +1,183 @@ +import socket +from abc import abstractmethod +from io import IOBase +from ipaddress import IPv4Address, IPv6Address +from socket import AddressFamily +from types import TracebackType +from typing import ( + Any, + AsyncContextManager, + Callable, + Collection, + Dict, + List, + Mapping, + Optional, + Tuple, + Type, + TypeVar, + Union, +) + +from .._core._typedattr import ( + TypedAttributeProvider, + TypedAttributeSet, + typed_attribute, +) +from ._streams import ByteStream, Listener, T_Stream, UnreliableObjectStream +from ._tasks import TaskGroup + +IPAddressType = Union[str, IPv4Address, IPv6Address] +IPSockAddrType = Tuple[str, int] +SockAddrType = Union[IPSockAddrType, str] +UDPPacketType = Tuple[bytes, IPSockAddrType] +T_Retval = TypeVar("T_Retval") + + +class _NullAsyncContextManager: + async def __aenter__(self) -> None: + pass + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + return None + + +class SocketAttribute(TypedAttributeSet): + #: the address family of the underlying socket + family: AddressFamily = typed_attribute() + #: the local socket address of the underlying socket + local_address: SockAddrType = typed_attribute() + #: for IP addresses, the local port the underlying socket is bound to + local_port: int = typed_attribute() + #: the underlying stdlib socket object + raw_socket: socket.socket = typed_attribute() + #: the remote address the underlying socket is connected to + remote_address: SockAddrType = typed_attribute() + #: for IP addresses, the remote port the underlying socket is connected to + remote_port: int = typed_attribute() + + +class _SocketProvider(TypedAttributeProvider): + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + from .._core._sockets import convert_ipv6_sockaddr as convert + + attributes: Dict[Any, Callable[[], Any]] = { + SocketAttribute.family: lambda: self._raw_socket.family, + SocketAttribute.local_address: lambda: convert( + self._raw_socket.getsockname() + ), + SocketAttribute.raw_socket: lambda: self._raw_socket, + } + try: + peername: Optional[Tuple[str, int]] = convert( + self._raw_socket.getpeername() + ) + except 
OSError:
+ peername = None
+
+ # Provide the remote address for connected sockets
+ if peername is not None:
+ attributes[SocketAttribute.remote_address] = lambda: peername
+
+ # Provide local and remote ports for IP based sockets
+ if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6):
+ attributes[
+ SocketAttribute.local_port
+ ] = lambda: self._raw_socket.getsockname()[1]
+ if peername is not None:
+ remote_port = peername[1]
+ attributes[SocketAttribute.remote_port] = lambda: remote_port
+
+ return attributes
+
+ @property
+ @abstractmethod
+ def _raw_socket(self) -> socket.socket:
+ pass
+
+
+class SocketStream(ByteStream, _SocketProvider):
+ """
+ Transports bytes over a socket.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
+
+
+class UNIXSocketStream(SocketStream):
+ @abstractmethod
+ async def send_fds(
+ self, message: bytes, fds: Collection[Union[int, IOBase]]
+ ) -> None:
+ """
+ Send file descriptors along with a message to the peer.
+
+ :param message: a non-empty bytestring
+ :param fds: a collection of files (either numeric file descriptors or open file or socket
+ objects)
+ """
+
+ @abstractmethod
+ async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]:
+ """
+ Receive file descriptors along with a message from the peer.
+
+ :param msglen: length of the message to expect from the peer
+ :param maxfds: maximum number of file descriptors to expect from the peer
+ :return: a tuple of (message, file descriptors)
+ """
+
+
+class SocketListener(Listener[SocketStream], _SocketProvider):
+ """
+ Listens to incoming socket connections.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
+
+ @abstractmethod
+ async def accept(self) -> SocketStream:
+ """Accept an incoming connection."""
+
+ async def serve(
+ self, handler: Callable[[T_Stream], Any], task_group: Optional[TaskGroup] = None
+ ) -> None:
+ from .. import create_task_group
+
+ context_manager: AsyncContextManager
+ if task_group is None:
+ task_group = context_manager = create_task_group()
+ else:
+ # Can be replaced with AsyncExitStack once on py3.7+
+ context_manager = _NullAsyncContextManager()
+
+ async with context_manager:
+ while True:
+ stream = await self.accept()
+ task_group.start_soon(handler, stream)
+
+
+class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider):
+ """
+ Represents an unconnected UDP socket.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
+
+ async def sendto(self, data: bytes, host: str, port: int) -> None:
+ """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port)))."""
+ return await self.send((data, (host, port)))
+
+
+class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider):
+ """
+ Represents a connected UDP socket.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
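A usage sketch for the SocketAttribute set above (assumes outbound network access; example.org is a placeholder host). Typed attributes are read off a connected stream with extra():

import anyio
from anyio.abc import SocketAttribute


async def main() -> None:
    stream = await anyio.connect_tcp("example.org", 80)
    async with stream:
        print("family:", stream.extra(SocketAttribute.family))
        print("remote:", stream.extra(SocketAttribute.remote_address))
        # a default avoids TypedAttributeLookupError for absent attributes
        print("port:", stream.extra(SocketAttribute.remote_port, None))


anyio.run(main)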
+ """ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_streams.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_streams.py new file mode 100644 index 00000000..4980ef4f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_streams.py @@ -0,0 +1,198 @@ +from abc import abstractmethod +from typing import Any, Callable, Generic, Optional, TypeVar, Union + +from .._core._exceptions import EndOfStream +from .._core._typedattr import TypedAttributeProvider +from ._resources import AsyncResource +from ._tasks import TaskGroup + +T_Item = TypeVar("T_Item") +T_Stream = TypeVar("T_Stream") + + +class UnreliableObjectReceiveStream( + Generic[T_Item], AsyncResource, TypedAttributeProvider +): + """ + An interface for receiving objects. + + This interface makes no guarantees that the received messages arrive in the order in which they + were sent, or that no messages are missed. + + Asynchronously iterating over objects of this type will yield objects matching the given type + parameter. + """ + + def __aiter__(self) -> "UnreliableObjectReceiveStream[T_Item]": + return self + + async def __anext__(self) -> T_Item: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self) -> T_Item: + """ + Receive the next item. + + :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly + closed + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectSendStream( + Generic[T_Item], AsyncResource, TypedAttributeProvider +): + """ + An interface for sending objects. + + This interface makes no guarantees that the messages sent will reach the recipient(s) in the + same order in which they were sent, or at all. + """ + + @abstractmethod + async def send(self, item: T_Item) -> None: + """ + Send an item to the peer(s). + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if the send stream has been explicitly + closed + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectStream( + UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item] +): + """ + A bidirectional message stream which does not guarantee the order or reliability of message + delivery. + """ + + +class ObjectReceiveStream(UnreliableObjectReceiveStream[T_Item]): + """ + A receive message stream which guarantees that messages are received in the same order in + which they were sent, and that no messages are missed. + """ + + +class ObjectSendStream(UnreliableObjectSendStream[T_Item]): + """ + A send message stream which guarantees that messages are delivered in the same order in which + they were sent, without missing any messages in the middle. + """ + + +class ObjectStream( + ObjectReceiveStream[T_Item], + ObjectSendStream[T_Item], + UnreliableObjectStream[T_Item], +): + """ + A bidirectional message stream which guarantees the order and reliability of message delivery. + """ + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this method. + This method is idempotent (does nothing on successive calls). 
+ """ + + +class ByteReceiveStream(AsyncResource, TypedAttributeProvider): + """ + An interface for receiving bytes from a single peer. + + Iterating this byte stream will yield a byte string of arbitrary length, but no more than + 65536 bytes. + """ + + def __aiter__(self) -> "ByteReceiveStream": + return self + + async def __anext__(self) -> bytes: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self, max_bytes: int = 65536) -> bytes: + """ + Receive at most ``max_bytes`` bytes from the peer. + + .. note:: Implementors of this interface should not return an empty :class:`bytes` object, + and users should ignore them. + + :param max_bytes: maximum number of bytes to receive + :return: the received bytes + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + """ + + +class ByteSendStream(AsyncResource, TypedAttributeProvider): + """An interface for sending bytes to a single peer.""" + + @abstractmethod + async def send(self, item: bytes) -> None: + """ + Send the given bytes to the peer. + + :param item: the bytes to send + """ + + +class ByteStream(ByteReceiveStream, ByteSendStream): + """A bidirectional byte stream.""" + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this method. + This method is idempotent (does nothing on successive calls). + """ + + +#: Type alias for all unreliable bytes-oriented receive streams. +AnyUnreliableByteReceiveStream = Union[ + UnreliableObjectReceiveStream[bytes], ByteReceiveStream +] +#: Type alias for all unreliable bytes-oriented send streams. +AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream] +#: Type alias for all unreliable bytes-oriented streams. +AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream] +#: Type alias for all bytes-oriented receive streams. +AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream] +#: Type alias for all bytes-oriented send streams. +AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream] +#: Type alias for all bytes-oriented streams. +AnyByteStream = Union[ObjectStream[bytes], ByteStream] + + +class Listener(Generic[T_Stream], AsyncResource, TypedAttributeProvider): + """An interface for objects that let you accept incoming connections.""" + + @abstractmethod + async def serve( + self, handler: Callable[[T_Stream], Any], task_group: Optional[TaskGroup] = None + ) -> None: + """ + Accept incoming connections as they come in and start tasks to handle them. 
+ + :param handler: a callable that will be used to handle each accepted connection + :param task_group: the task group that will be used to start tasks for handling each + accepted connection (if omitted, an ad-hoc task group will be created) + """ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_subprocesses.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_subprocesses.py new file mode 100644 index 00000000..1e633fb9 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_subprocesses.py @@ -0,0 +1,78 @@ +from abc import abstractmethod +from signal import Signals +from typing import Optional + +from ._resources import AsyncResource +from ._streams import ByteReceiveStream, ByteSendStream + + +class Process(AsyncResource): + """An asynchronous version of :class:`subprocess.Popen`.""" + + @abstractmethod + async def wait(self) -> int: + """ + Wait until the process exits. + + :return: the exit code of the process + """ + + @abstractmethod + def terminate(self) -> None: + """ + Terminates the process, gracefully if possible. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGTERM`` to the process. + + .. seealso:: :meth:`subprocess.Popen.terminate` + """ + + @abstractmethod + def kill(self) -> None: + """ + Kills the process. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGKILL`` to the process. + + .. seealso:: :meth:`subprocess.Popen.kill` + """ + + @abstractmethod + def send_signal(self, signal: Signals) -> None: + """ + Send a signal to the subprocess. + + .. seealso:: :meth:`subprocess.Popen.send_signal` + + :param signal: the signal number (e.g. :data:`signal.SIGHUP`) + """ + + @property + @abstractmethod + def pid(self) -> int: + """The process ID of the process.""" + + @property + @abstractmethod + def returncode(self) -> Optional[int]: + """ + The return code of the process. If the process has not yet terminated, this will be + ``None``. + """ + + @property + @abstractmethod + def stdin(self) -> Optional[ByteSendStream]: + """The stream for the standard input of the process.""" + + @property + @abstractmethod + def stdout(self) -> Optional[ByteReceiveStream]: + """The stream for the standard output of the process.""" + + @property + @abstractmethod + def stderr(self) -> Optional[ByteReceiveStream]: + """The stream for the standard error output of the process.""" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_tasks.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_tasks.py new file mode 100644 index 00000000..99928a1f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_tasks.py @@ -0,0 +1,104 @@ +import typing +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import Any, Callable, Coroutine, Optional, Type, TypeVar +from warnings import warn + +if typing.TYPE_CHECKING: + from anyio._core._tasks import CancelScope + +T_Retval = TypeVar("T_Retval") + + +class TaskStatus(metaclass=ABCMeta): + @abstractmethod + def started(self, value: object = None) -> None: + """ + Signal that the task has started. + + :param value: object passed back to the starter of the task + """ + + +class TaskGroup(metaclass=ABCMeta): + """ + Groups several asynchronous tasks together. 
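The Process ABC above backs anyio's high-level subprocess helpers; a minimal sketch using anyio.run_process (not part of the patch):

import sys

import anyio


async def main() -> None:
    result = await anyio.run_process([sys.executable, "-c", "print('hi')"])
    print(result.returncode, result.stdout)  # 0, b"hi\n" (b"hi\r\n" on Windows)


anyio.run(main)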
+ + :ivar cancel_scope: the cancel scope inherited by all child tasks + :vartype cancel_scope: CancelScope + """ + + cancel_scope: "CancelScope" + + async def spawn( + self, + func: Callable[..., Coroutine[Any, Any, Any]], + *args: object, + name: object = None + ) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. deprecated:: 3.0 + Use :meth:`start_soon` instead. If your code needs AnyIO 2 compatibility, you + can keep using this until AnyIO 4. + + """ + warn( + 'spawn() is deprecated -- use start_soon() (without the "await") instead', + DeprecationWarning, + ) + self.start_soon(func, *args, name=name) + + @abstractmethod + def start_soon( + self, + func: Callable[..., Coroutine[Any, Any, Any]], + *args: object, + name: object = None + ) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def start( + self, + func: Callable[..., Coroutine[Any, Any, Any]], + *args: object, + name: object = None + ) -> object: + """ + Start a new task and wait until it signals for readiness. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + :return: the value passed to ``task_status.started()`` + :raises RuntimeError: if the task finishes without calling ``task_status.started()`` + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def __aenter__(self) -> "TaskGroup": + """Enter the task group context and allow starting new tasks.""" + + @abstractmethod + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + """Exit the task group context waiting for all tasks to finish.""" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_testing.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_testing.py new file mode 100644 index 00000000..4e3621df --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/abc/_testing.py @@ -0,0 +1,68 @@ +import types +from abc import ABCMeta, abstractmethod +from collections.abc import AsyncGenerator, Iterable +from typing import Any, Callable, Coroutine, Dict, Optional, Type, TypeVar + +_T = TypeVar("_T") + + +class TestRunner(metaclass=ABCMeta): + """ + Encapsulates a running event loop. Every call made through this object will use the same event + loop. + """ + + def __enter__(self) -> "TestRunner": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[types.TracebackType], + ) -> Optional[bool]: + self.close() + return None + + @abstractmethod + def close(self) -> None: + """Close the event loop.""" + + @abstractmethod + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., "AsyncGenerator[_T, Any]"], + kwargs: Dict[str, Any], + ) -> "Iterable[_T]": + """ + Run an async generator fixture. 
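Sketch of the TaskGroup.start() handshake described above (illustrative; "server" and the port value are made up): the child passes a value to task_status.started() and the parent gets it back.

import anyio
from anyio.abc import TaskStatus


async def server(task_status: TaskStatus = anyio.TASK_STATUS_IGNORED) -> None:
    port = 8000  # pretend a listener was bound here
    task_status.started(port)  # unblocks tg.start() in the parent
    await anyio.sleep(10)


async def main() -> None:
    async with anyio.create_task_group() as tg:
        port = await tg.start(server)
        print("ready on port", port)
        tg.cancel_scope.cancel()  # stop the server task


anyio.run(main)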
+ + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: an iterator yielding the value yielded from the async generator + """ + + @abstractmethod + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, _T]], + kwargs: Dict[str, Any], + ) -> _T: + """ + Run an async fixture. + + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: the return value of the fixture function + """ + + @abstractmethod + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: Dict[str, Any] + ) -> None: + """ + Run an async test function. + + :param test_func: the test function + :param kwargs: keyword arguments to call the test function with + """ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/from_thread.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/from_thread.py new file mode 100644 index 00000000..e4f871fd --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/from_thread.py @@ -0,0 +1,502 @@ +import threading +from asyncio import iscoroutine +from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait +from contextlib import AbstractContextManager, contextmanager +from types import TracebackType +from typing import ( + Any, + AsyncContextManager, + Callable, + ContextManager, + Coroutine, + Dict, + Generator, + Iterable, + Optional, + Tuple, + Type, + TypeVar, + Union, + cast, + overload, +) +from warnings import warn + +from ._core import _eventloop +from ._core._eventloop import get_asynclib, get_cancelled_exc_class, threadlocals +from ._core._synchronization import Event +from ._core._tasks import CancelScope, create_task_group +from .abc._tasks import TaskStatus + +T_Retval = TypeVar("T_Retval") +T_co = TypeVar("T_co") + + +def run(func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object) -> T_Retval: + """ + Call a coroutine function from a worker thread. + + :param func: a coroutine function + :param args: positional arguments for the callable + :return: the return value of the coroutine function + + """ + try: + asynclib = threadlocals.current_async_module + except AttributeError: + raise RuntimeError("This function can only be run from an AnyIO worker thread") + + return asynclib.run_async_from_thread(func, *args) + + +def run_async_from_thread( + func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object +) -> T_Retval: + warn( + "run_async_from_thread() has been deprecated, use anyio.from_thread.run() instead", + DeprecationWarning, + ) + return run(func, *args) + + +def run_sync(func: Callable[..., T_Retval], *args: object) -> T_Retval: + """ + Call a function in the event loop thread from a worker thread. 
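from_thread.run() above is the return path of anyio's worker threads; a round-trip sketch (not part of the patch):

import anyio
from anyio import from_thread, to_thread


def blocking_work() -> str:
    # runs in a worker thread; call back into the event loop
    from_thread.run(anyio.sleep, 0.1)
    return "done"


async def main() -> None:
    print(await to_thread.run_sync(blocking_work))


anyio.run(main)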
+ + :param func: a callable + :param args: positional arguments for the callable + :return: the return value of the callable + + """ + try: + asynclib = threadlocals.current_async_module + except AttributeError: + raise RuntimeError("This function can only be run from an AnyIO worker thread") + + return asynclib.run_sync_from_thread(func, *args) + + +def run_sync_from_thread(func: Callable[..., T_Retval], *args: object) -> T_Retval: + warn( + "run_sync_from_thread() has been deprecated, use anyio.from_thread.run_sync() instead", + DeprecationWarning, + ) + return run_sync(func, *args) + + +class _BlockingAsyncContextManager(AbstractContextManager): + _enter_future: Future + _exit_future: Future + _exit_event: Event + _exit_exc_info: Tuple[ + Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType] + ] = (None, None, None) + + def __init__(self, async_cm: AsyncContextManager[T_co], portal: "BlockingPortal"): + self._async_cm = async_cm + self._portal = portal + + async def run_async_cm(self) -> Optional[bool]: + try: + self._exit_event = Event() + value = await self._async_cm.__aenter__() + except BaseException as exc: + self._enter_future.set_exception(exc) + raise + else: + self._enter_future.set_result(value) + + try: + # Wait for the sync context manager to exit. + # This next statement can raise `get_cancelled_exc_class()` if + # something went wrong in a task group in this async context + # manager. + await self._exit_event.wait() + finally: + # In case of cancellation, it could be that we end up here before + # `_BlockingAsyncContextManager.__exit__` is called, and an + # `_exit_exc_info` has been set. + result = await self._async_cm.__aexit__(*self._exit_exc_info) + return result + + def __enter__(self) -> T_co: + self._enter_future = Future() + self._exit_future = self._portal.start_task_soon(self.run_async_cm) + cm = self._enter_future.result() + return cast(T_co, cm) + + def __exit__( + self, + __exc_type: Optional[Type[BaseException]], + __exc_value: Optional[BaseException], + __traceback: Optional[TracebackType], + ) -> Optional[bool]: + self._exit_exc_info = __exc_type, __exc_value, __traceback + self._portal.call(self._exit_event.set) + return self._exit_future.result() + + +class _BlockingPortalTaskStatus(TaskStatus): + def __init__(self, future: Future): + self._future = future + + def started(self, value: object = None) -> None: + self._future.set_result(value) + + +class BlockingPortal: + """An object that lets external threads run code in an asynchronous event loop.""" + + def __new__(cls) -> "BlockingPortal": + return get_asynclib().BlockingPortal() + + def __init__(self) -> None: + self._event_loop_thread_id: Optional[int] = threading.get_ident() + self._stop_event = Event() + self._task_group = create_task_group() + self._cancelled_exc_class = get_cancelled_exc_class() + + async def __aenter__(self) -> "BlockingPortal": + await self._task_group.__aenter__() + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + await self.stop() + return await self._task_group.__aexit__(exc_type, exc_val, exc_tb) + + def _check_running(self) -> None: + if self._event_loop_thread_id is None: + raise RuntimeError("This portal is not running") + if self._event_loop_thread_id == threading.get_ident(): + raise RuntimeError( + "This method cannot be called from the event loop thread" + ) + + async def sleep_until_stopped(self) -> None: + 
"""Sleep until :meth:`stop` is called.""" + await self._stop_event.wait() + + async def stop(self, cancel_remaining: bool = False) -> None: + """ + Signal the portal to shut down. + + This marks the portal as no longer accepting new calls and exits from + :meth:`sleep_until_stopped`. + + :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` to let them + finish before returning + + """ + self._event_loop_thread_id = None + self._stop_event.set() + if cancel_remaining: + self._task_group.cancel_scope.cancel() + + async def _call_func( + self, func: Callable, args: tuple, kwargs: Dict[str, Any], future: Future + ) -> None: + def callback(f: Future) -> None: + if f.cancelled() and self._event_loop_thread_id not in ( + None, + threading.get_ident(), + ): + self.call(scope.cancel) + + try: + retval = func(*args, **kwargs) + if iscoroutine(retval): + with CancelScope() as scope: + if future.cancelled(): + scope.cancel() + else: + future.add_done_callback(callback) + + retval = await retval + except self._cancelled_exc_class: + future.cancel() + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + # Let base exceptions fall through + if not isinstance(exc, Exception): + raise + else: + if not future.cancelled(): + future.set_result(retval) + finally: + scope = None # type: ignore[assignment] + + def _spawn_task_from_thread( + self, + func: Callable, + args: tuple, + kwargs: Dict[str, Any], + name: object, + future: Future, + ) -> None: + """ + Spawn a new task using the given callable. + + Implementors must ensure that the future is resolved when the task finishes. + + :param func: a callable + :param args: positional arguments to be passed to the callable + :param kwargs: keyword arguments to be passed to the callable + :param name: name of the task (will be coerced to a string if not ``None``) + :param future: a future that will resolve to the return value of the callable, or the + exception raised during its execution + + """ + raise NotImplementedError + + @overload + def call( + self, func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object + ) -> T_Retval: + ... + + @overload + def call(self, func: Callable[..., T_Retval], *args: object) -> T_Retval: + ... + + def call( + self, + func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object + ) -> T_Retval: + """ + Call the given function in the event loop thread. + + If the callable returns a coroutine object, it is awaited on. + + :param func: any callable + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + """ + return cast(T_Retval, self.start_task_soon(func, *args).result()) + + @overload + def spawn_task( + self, + func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object, + name: object = None + ) -> "Future[T_Retval]": + ... + + @overload + def spawn_task( + self, func: Callable[..., T_Retval], *args: object, name: object = None + ) -> "Future[T_Retval]": + ... + + def spawn_task( + self, + func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object, + name: object = None + ) -> "Future[T_Retval]": + """ + Start a task in the portal's task group. 
+ + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the task completes + successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + .. versionadded:: 2.1 + .. deprecated:: 3.0 + Use :meth:`start_task_soon` instead. If your code needs AnyIO 2 compatibility, you + can keep using this until AnyIO 4. + + """ + warn( + "spawn_task() is deprecated -- use start_task_soon() instead", + DeprecationWarning, + ) + return self.start_task_soon(func, *args, name=name) # type: ignore[arg-type] + + @overload + def start_task_soon( + self, + func: Callable[..., Coroutine[Any, Any, T_Retval]], + *args: object, + name: object = None + ) -> "Future[T_Retval]": + ... + + @overload + def start_task_soon( + self, func: Callable[..., T_Retval], *args: object, name: object = None + ) -> "Future[T_Retval]": + ... + + def start_task_soon( + self, + func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]], + *args: object, + name: object = None + ) -> "Future[T_Retval]": + """ + Start a task in the portal's task group. + + The task will be run inside a cancel scope which can be cancelled by cancelling the + returned future. + + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the task completes + successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called from within + the event loop thread + + .. versionadded:: 3.0 + + """ + self._check_running() + f: Future = Future() + self._spawn_task_from_thread(func, args, {}, name, f) + return f + + def start_task( + self, + func: Callable[..., Coroutine[Any, Any, Any]], + *args: object, + name: object = None + ) -> Tuple["Future[Any]", Any]: + """ + Start a task in the portal's task group and wait until it signals for readiness. + + This method works the same way as :meth:`TaskGroup.start`. + + :param func: the target coroutine function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a tuple of (future, task_status_value) where the ``task_status_value`` is the + value passed to ``task_status.started()`` from within the target function + + .. 
versionadded:: 3.0 + + """ + + def task_done(future: Future) -> None: + if not task_status_future.done(): + if future.cancelled(): + task_status_future.cancel() + elif future.exception(): + task_status_future.set_exception(future.exception()) + else: + exc = RuntimeError( + "Task exited without calling task_status.started()" + ) + task_status_future.set_exception(exc) + + self._check_running() + task_status_future: Future = Future() + task_status = _BlockingPortalTaskStatus(task_status_future) + f: Future = Future() + f.add_done_callback(task_done) + self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f) + return f, task_status_future.result() + + def wrap_async_context_manager( + self, cm: AsyncContextManager[T_co] + ) -> ContextManager[T_co]: + """ + Wrap an async context manager as a synchronous context manager via this portal. + + Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping in the + middle until the synchronous context manager exits. + + :param cm: an asynchronous context manager + :return: a synchronous context manager + + .. versionadded:: 2.1 + + """ + return _BlockingAsyncContextManager(cm, self) + + +def create_blocking_portal() -> BlockingPortal: + """ + Create a portal for running functions in the event loop thread from external threads. + + Use this function in asynchronous code when you need to allow external threads access to the + event loop where your asynchronous code is currently running. + + .. deprecated:: 3.0 + Use :class:`.BlockingPortal` directly. + + """ + warn( + "create_blocking_portal() has been deprecated -- use anyio.from_thread.BlockingPortal() " + "directly", + DeprecationWarning, + ) + return BlockingPortal() + + +@contextmanager +def start_blocking_portal( + backend: str = "asyncio", backend_options: Optional[Dict[str, Any]] = None +) -> Generator[BlockingPortal, Any, None]: + """ + Start a new event loop in a new thread and run a blocking portal in its main task. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + :return: a context manager that yields a blocking portal + + .. versionchanged:: 3.0 + Usage as a context manager is now required. 
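Context-manager usage of start_blocking_portal(), as required above; a sketch (illustrative) driving an asyncio event loop from plain synchronous code:

from anyio.from_thread import start_blocking_portal


async def greet(name: str) -> str:
    return f"hello, {name}"


with start_blocking_portal(backend="asyncio") as portal:
    print(portal.call(greet, "world"))            # run and wait
    future = portal.start_task_soon(greet, "bg")  # schedule, collect later
    print(future.result())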
+ + """ + + async def run_portal() -> None: + async with BlockingPortal() as portal_: + if future.set_running_or_notify_cancel(): + future.set_result(portal_) + await portal_.sleep_until_stopped() + + future: Future[BlockingPortal] = Future() + with ThreadPoolExecutor(1) as executor: + run_future = executor.submit( + _eventloop.run, + run_portal, # type: ignore[arg-type] + backend=backend, + backend_options=backend_options, + ) + try: + wait( + cast(Iterable[Future], [run_future, future]), + return_when=FIRST_COMPLETED, + ) + except BaseException: + future.cancel() + run_future.cancel() + raise + + if future.done(): + portal = future.result() + try: + yield portal + except BaseException: + portal.call(portal.stop, True) + raise + + portal.call(portal.stop, False) + + run_future.result() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/lowlevel.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/lowlevel.py new file mode 100644 index 00000000..c1da8fa5 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/lowlevel.py @@ -0,0 +1,170 @@ +import enum +import sys +from dataclasses import dataclass +from typing import Any, Dict, Generic, Set, TypeVar, Union, overload +from weakref import WeakKeyDictionary + +from ._core._eventloop import get_asynclib + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal + +T = TypeVar("T") +D = TypeVar("D") + + +async def checkpoint() -> None: + """ + Check for cancellation and allow the scheduler to switch to another task. + + Equivalent to (but more efficient than):: + + await checkpoint_if_cancelled() + await cancel_shielded_checkpoint() + + .. versionadded:: 3.0 + + """ + await get_asynclib().checkpoint() + + +async def checkpoint_if_cancelled() -> None: + """ + Enter a checkpoint if the enclosing cancel scope has been cancelled. + + This does not allow the scheduler to switch to a different task. + + .. versionadded:: 3.0 + + """ + await get_asynclib().checkpoint_if_cancelled() + + +async def cancel_shielded_checkpoint() -> None: + """ + Allow the scheduler to switch to another task but without checking for cancellation. + + Equivalent to (but potentially more efficient than):: + + with CancelScope(shield=True): + await checkpoint() + + .. 
versionadded:: 3.0
+
+ """
+ await get_asynclib().cancel_shielded_checkpoint()
+
+
+def current_token() -> object:
+ """Return a backend specific token object that can be used to get back to the event loop."""
+ return get_asynclib().current_token()
+
+
+_run_vars = WeakKeyDictionary() # type: WeakKeyDictionary[Any, Dict[str, Any]]
+_token_wrappers: Dict[Any, "_TokenWrapper"] = {}
+
+
+@dataclass(frozen=True)
+class _TokenWrapper:
+ __slots__ = "_token", "__weakref__"
+ _token: object
+
+
+class _NoValueSet(enum.Enum):
+ NO_VALUE_SET = enum.auto()
+
+
+class RunvarToken(Generic[T]):
+ __slots__ = "_var", "_value", "_redeemed"
+
+ def __init__(
+ self, var: "RunVar[T]", value: Union[T, Literal[_NoValueSet.NO_VALUE_SET]]
+ ):
+ self._var = var
+ self._value: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] = value
+ self._redeemed = False
+
+
+class RunVar(Generic[T]):
+ """Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop."""
+
+ __slots__ = "_name", "_default"
+
+ NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET
+
+ _token_wrappers: Set[_TokenWrapper] = set()
+
+ def __init__(
+ self,
+ name: str,
+ default: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] = NO_VALUE_SET,
+ ):
+ self._name = name
+ self._default = default
+
+ @property
+ def _current_vars(self) -> Dict[str, T]:
+ token = current_token()
+ while True:
+ try:
+ return _run_vars[token]
+ except TypeError:
+ # Happens when token isn't weak referable (TrioToken).
+ # This workaround does mean that some memory will leak on Trio until the problem
+ # is fixed on their end.
+ token = _TokenWrapper(token)
+ self._token_wrappers.add(token)
+ except KeyError:
+ run_vars = _run_vars[token] = {}
+ return run_vars
+
+ @overload
+ def get(self, default: D) -> Union[T, D]:
+ ...
+
+ @overload
+ def get(self) -> T:
+ ...
+
+ def get(
+ self, default: Union[D, Literal[_NoValueSet.NO_VALUE_SET]] = NO_VALUE_SET
+ ) -> Union[T, D]:
+ try:
+ return self._current_vars[self._name]
+ except KeyError:
+ if default is not RunVar.NO_VALUE_SET:
+ return default
+ elif self._default is not RunVar.NO_VALUE_SET:
+ return self._default
+
+ raise LookupError(
+ f'Run variable "{self._name}" has no value and no default set'
+ )
+
+ def set(self, value: T) -> RunvarToken[T]:
+ current_vars = self._current_vars
+ token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET))
+ current_vars[self._name] = value
+ return token
+
+ def reset(self, token: RunvarToken[T]) -> None:
+ if token._var is not self:
+ raise ValueError("This token does not belong to this RunVar")
+
+ if token._redeemed:
+ raise ValueError("This token has already been used")
+
+ if token._value is _NoValueSet.NO_VALUE_SET:
+ try:
+ del self._current_vars[self._name]
+ except KeyError:
+ pass
+ else:
+ self._current_vars[self._name] = token._value
+
+ token._redeemed = True
+
+ def __repr__(self) -> str:
+ return f"<RunVar name={self._name!r}>" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/py.typed b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/pytest_plugin.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/pytest_plugin.py new file mode 100644 index 00000000..432eee3b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/pytest_plugin.py @@ -0,0 +1,144 @@ +from contextlib import contextmanager
+from inspect import isasyncgenfunction, iscoroutinefunction
+from typing import TYPE_CHECKING, Any, Dict, Generator, Optional, Tuple, cast
+
+import pytest
+import sniffio
+from _pytest.fixtures import FixtureRequest
+
+from ._core._eventloop import get_all_backends, get_asynclib
+from .abc import TestRunner
+
+if TYPE_CHECKING:
+ from _pytest.config import Config
+
+_current_runner: Optional[TestRunner] = None
+
+
+def extract_backend_and_options(backend: object) -> Tuple[str, Dict[str, Any]]:
+ if isinstance(backend, str):
+ return backend, {}
+ elif isinstance(backend, tuple) and len(backend) == 2:
+ if isinstance(backend[0], str) and isinstance(backend[1], dict):
+ return cast(Tuple[str, Dict[str, Any]], backend)
+
+ raise TypeError("anyio_backend must be either a string or tuple of (string, dict)")
+
+
+@contextmanager
+def get_runner(
+ backend_name: str, backend_options: Dict[str, Any]
+) -> Generator[TestRunner, object, None]:
+ global _current_runner
+ if _current_runner:
+ yield _current_runner
+ return
+
+ asynclib = get_asynclib(backend_name)
+ token = None
+ if sniffio.current_async_library_cvar.get(None) is None:
+ # Since we're in control of the event loop, we can cache the name of the async library
+ token = sniffio.current_async_library_cvar.set(backend_name)
+
+ try:
+ backend_options = backend_options or {}
+ with asynclib.TestRunner(**backend_options) as runner:
+ _current_runner = runner
+ yield runner
+ finally:
+ _current_runner = None
+ if token:
+ sniffio.current_async_library_cvar.reset(token)
+
+
+def pytest_configure(config: "Config") -> None:
+ config.addinivalue_line(
+ "markers",
+ "anyio: mark the (coroutine function) test to be run "
+ "asynchronously via anyio.",
+ )
+
+
+def pytest_fixture_setup(fixturedef: Any, request: FixtureRequest) -> None:
+ def wrapper(*args, anyio_backend, **kwargs): # type:
ignore[no-untyped-def] + backend_name, backend_options = extract_backend_and_options(anyio_backend) + if has_backend_arg: + kwargs["anyio_backend"] = anyio_backend + + with get_runner(backend_name, backend_options) as runner: + if isasyncgenfunction(func): + yield from runner.run_asyncgen_fixture(func, kwargs) + else: + yield runner.run_fixture(func, kwargs) + + # Only apply this to coroutine functions and async generator functions in requests that involve + # the anyio_backend fixture + func = fixturedef.func + if isasyncgenfunction(func) or iscoroutinefunction(func): + if "anyio_backend" in request.fixturenames: + has_backend_arg = "anyio_backend" in fixturedef.argnames + fixturedef.func = wrapper + if not has_backend_arg: + fixturedef.argnames += ("anyio_backend",) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None: + if collector.istestfunction(obj, name): + inner_func = obj.hypothesis.inner_test if hasattr(obj, "hypothesis") else obj + if iscoroutinefunction(inner_func): + marker = collector.get_closest_marker("anyio") + own_markers = getattr(obj, "pytestmark", ()) + if marker or any(marker.name == "anyio" for marker in own_markers): + pytest.mark.usefixtures("anyio_backend")(obj) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pyfunc_call(pyfuncitem: Any) -> Optional[bool]: + def run_with_hypothesis(**kwargs: Any) -> None: + with get_runner(backend_name, backend_options) as runner: + runner.run_test(original_func, kwargs) + + backend = pyfuncitem.funcargs.get("anyio_backend") + if backend: + backend_name, backend_options = extract_backend_and_options(backend) + + if hasattr(pyfuncitem.obj, "hypothesis"): + # Wrap the inner test function unless it's already wrapped + original_func = pyfuncitem.obj.hypothesis.inner_test + if original_func.__qualname__ != run_with_hypothesis.__qualname__: + if iscoroutinefunction(original_func): + pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis + + return None + + if iscoroutinefunction(pyfuncitem.obj): + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + with get_runner(backend_name, backend_options) as runner: + runner.run_test(pyfuncitem.obj, testargs) + + return True + + return None + + +@pytest.fixture(params=get_all_backends()) +def anyio_backend(request: Any) -> Any: + return request.param + + +@pytest.fixture +def anyio_backend_name(anyio_backend: Any) -> str: + if isinstance(anyio_backend, str): + return anyio_backend + else: + return anyio_backend[0] + + +@pytest.fixture +def anyio_backend_options(anyio_backend: Any) -> Dict[str, Any]: + if isinstance(anyio_backend, str): + return {} + else: + return anyio_backend[1] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/buffered.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/buffered.py new file mode 100644 index 00000000..1503b3e4 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/buffered.py @@ -0,0 +1,116 @@ +from dataclasses import dataclass, field +from typing import Any, Callable, Mapping + +from .. 
import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead +from ..abc import AnyByteReceiveStream, ByteReceiveStream + + +@dataclass(eq=False) +class BufferedByteReceiveStream(ByteReceiveStream): + """ + Wraps any bytes-based receive stream and uses a buffer to provide sophisticated receiving + capabilities in the form of a byte stream. + """ + + receive_stream: AnyByteReceiveStream + _buffer: bytearray = field(init=False, default_factory=bytearray) + _closed: bool = field(init=False, default=False) + + async def aclose(self) -> None: + await self.receive_stream.aclose() + self._closed = True + + @property + def buffer(self) -> bytes: + """The bytes currently in the buffer.""" + return bytes(self._buffer) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.receive_stream.extra_attributes + + async def receive(self, max_bytes: int = 65536) -> bytes: + if self._closed: + raise ClosedResourceError + + if self._buffer: + chunk = bytes(self._buffer[:max_bytes]) + del self._buffer[:max_bytes] + return chunk + elif isinstance(self.receive_stream, ByteReceiveStream): + return await self.receive_stream.receive(max_bytes) + else: + # With a bytes-oriented object stream, we need to handle any surplus bytes we get from + # the receive() call + chunk = await self.receive_stream.receive() + if len(chunk) > max_bytes: + # Save the surplus bytes in the buffer + self._buffer.extend(chunk[max_bytes:]) + return chunk[:max_bytes] + else: + return chunk + + async def receive_exactly(self, nbytes: int) -> bytes: + """ + Read exactly the given number of bytes from the stream. + + :param nbytes: the number of bytes to read + :return: the bytes read + :raises ~anyio.IncompleteRead: if the stream was closed before the requested + number of bytes could be read from the stream + + """ + while True: + remaining = nbytes - len(self._buffer) + if remaining <= 0: + retval = self._buffer[:nbytes] + del self._buffer[:nbytes] + return bytes(retval) + + try: + if isinstance(self.receive_stream, ByteReceiveStream): + chunk = await self.receive_stream.receive(remaining) + else: + chunk = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + self._buffer.extend(chunk) + + async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: + """ + Read from the stream until the delimiter is found or max_bytes have been read.
+ + :param delimiter: the marker to look for in the stream + :param max_bytes: maximum number of bytes that will be read before raising + :exc:`~anyio.DelimiterNotFound` + :return: the bytes read (not including the delimiter) + :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter + was found + :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the + bytes read up to the maximum allowed + + """ + delimiter_size = len(delimiter) + offset = 0 + while True: + # Check if the delimiter can be found in the current buffer + index = self._buffer.find(delimiter, offset) + if index >= 0: + found = self._buffer[:index] + del self._buffer[: index + len(delimiter) :] + return bytes(found) + + # Check if the buffer is already at or over the limit + if len(self._buffer) >= max_bytes: + raise DelimiterNotFound(max_bytes) + + # Read more data into the buffer from the socket + try: + data = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + # Move the offset forward and add the new data to the buffer + offset = max(len(self._buffer) - delimiter_size + 1, 0) + self._buffer.extend(data) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/file.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/file.py new file mode 100644 index 00000000..938d1daa --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/file.py @@ -0,0 +1,145 @@ +from io import SEEK_SET, UnsupportedOperation +from os import PathLike +from pathlib import Path +from typing import Any, BinaryIO, Callable, Dict, Mapping, Union, cast + +from .. import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + TypedAttributeSet, + to_thread, + typed_attribute, +) +from ..abc import ByteReceiveStream, ByteSendStream + + +class FileStreamAttribute(TypedAttributeSet): + #: the open file descriptor + file: BinaryIO = typed_attribute() + #: the path of the file on the file system, if available (file must be a real file) + path: Path = typed_attribute() + #: the file number, if available (file must be a real file or a TTY) + fileno: int = typed_attribute() + + +class _BaseFileStream: + def __init__(self, file: BinaryIO): + self._file = file + + async def aclose(self) -> None: + await to_thread.run_sync(self._file.close) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: Dict[Any, Callable[[], Any]] = { + FileStreamAttribute.file: lambda: self._file, + } + + if hasattr(self._file, "name"): + attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) + + try: + self._file.fileno() + except UnsupportedOperation: + pass + else: + attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() + + return attributes + + +class FileReadStream(_BaseFileStream, ByteReceiveStream): + """ + A byte stream that reads from a file in the file system. + + :param file: a file that has been opened for reading in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: Union[str, "PathLike[str]"]) -> "FileReadStream": + """ + Create a file read stream by opening the given file. 
+ + :param path: path of the file to read from + + """ + file = await to_thread.run_sync(Path(path).open, "rb") + return cls(cast(BinaryIO, file)) + + async def receive(self, max_bytes: int = 65536) -> bytes: + try: + data = await to_thread.run_sync(self._file.read, max_bytes) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc + + if data: + return data + else: + raise EndOfStream + + async def seek(self, position: int, whence: int = SEEK_SET) -> int: + """ + Seek the file to the given position. + + .. seealso:: :meth:`io.IOBase.seek` + + .. note:: Not all file descriptors are seekable. + + :param position: position to seek the file to + :param whence: controls how ``position`` is interpreted + :return: the new absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.seek, position, whence) + + async def tell(self) -> int: + """ + Return the current stream position. + + .. note:: Not all file descriptors are seekable. + + :return: the current absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.tell) + + +class FileWriteStream(_BaseFileStream, ByteSendStream): + """ + A byte stream that writes to a file in the file system. + + :param file: a file that has been opened for writing in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path( + cls, path: Union[str, "PathLike[str]"], append: bool = False + ) -> "FileWriteStream": + """ + Create a file write stream by opening the given file for writing. + + :param path: path of the file to write to + :param append: if ``True``, open the file for appending; if ``False``, any existing file + at the given path will be truncated + + """ + mode = "ab" if append else "wb" + file = await to_thread.run_sync(Path(path).open, mode) + return cls(cast(BinaryIO, file)) + + async def send(self, item: bytes) -> None: + try: + await to_thread.run_sync(self._file.write, item) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/memory.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/memory.py new file mode 100644 index 00000000..d8a958c7 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/memory.py @@ -0,0 +1,275 @@ +from collections import OrderedDict, deque +from dataclasses import dataclass, field +from types import TracebackType +from typing import Deque, Generic, List, NamedTuple, Optional, Type, TypeVar + +from .. 
import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + WouldBlock, + get_cancelled_exc_class, +) +from .._core._compat import DeprecatedAwaitable +from ..abc import Event, ObjectReceiveStream, ObjectSendStream +from ..lowlevel import checkpoint + +T_Item = TypeVar("T_Item") + + +class MemoryObjectStreamStatistics(NamedTuple): + current_buffer_used: int #: number of items stored in the buffer + #: maximum number of items that can be stored on this stream (or :data:`math.inf`) + max_buffer_size: float + open_send_streams: int #: number of unclosed clones of the send stream + open_receive_streams: int #: number of unclosed clones of the receive stream + tasks_waiting_send: int #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` + #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` + tasks_waiting_receive: int + + +@dataclass(eq=False) +class MemoryObjectStreamState(Generic[T_Item]): + max_buffer_size: float = field() + buffer: Deque[T_Item] = field(init=False, default_factory=deque) + open_send_channels: int = field(init=False, default=0) + open_receive_channels: int = field(init=False, default=0) + waiting_receivers: "OrderedDict[Event, List[T_Item]]" = field( + init=False, default_factory=OrderedDict + ) + waiting_senders: "OrderedDict[Event, T_Item]" = field( + init=False, default_factory=OrderedDict + ) + + def statistics(self) -> MemoryObjectStreamStatistics: + return MemoryObjectStreamStatistics( + len(self.buffer), + self.max_buffer_size, + self.open_send_channels, + self.open_receive_channels, + len(self.waiting_senders), + len(self.waiting_receivers), + ) + + +@dataclass(eq=False) +class MemoryObjectReceiveStream(Generic[T_Item], ObjectReceiveStream[T_Item]): + _state: MemoryObjectStreamState[T_Item] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_receive_channels += 1 + + def receive_nowait(self) -> T_Item: + """ + Receive the next item if it can be done without waiting. + + :return: the received item + :raises ~anyio.ClosedResourceError: if this receive stream has been closed + :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been + closed from the sending end + :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks + waiting to send + + """ + if self._closed: + raise ClosedResourceError + + if self._state.waiting_senders: + # Get the item from the next sender + send_event, item = self._state.waiting_senders.popitem(last=False) + self._state.buffer.append(item) + send_event.set() + + if self._state.buffer: + return self._state.buffer.popleft() + elif not self._state.open_send_channels: + raise EndOfStream + + raise WouldBlock + + async def receive(self) -> T_Item: + await checkpoint() + try: + return self.receive_nowait() + except WouldBlock: + # Add ourselves to the queue + receive_event = Event() + container: List[T_Item] = [] + self._state.waiting_receivers[receive_event] = container + + try: + await receive_event.wait() + except get_cancelled_exc_class(): + # Ignore the immediate cancellation if we already received an item, so as not to + # lose it + if not container: + raise + finally: + self._state.waiting_receivers.pop(receive_event, None) + + if container: + return container[0] + else: + raise EndOfStream + + def clone(self) -> "MemoryObjectReceiveStream[T_Item]": + """ + Create a clone of this receive stream. + + Each clone can be closed separately.
Only when all clones have been closed will the + receiving end of the memory stream be considered closed by the sending ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectReceiveStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special case for the + benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_receive_channels -= 1 + if self._state.open_receive_channels == 0: + send_events = list(self._state.waiting_senders.keys()) + for event in send_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> "MemoryObjectReceiveStream[T_Item]": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.close() + + +@dataclass(eq=False) +class MemoryObjectSendStream(Generic[T_Item], ObjectSendStream[T_Item]): + _state: MemoryObjectStreamState[T_Item] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_send_channels += 1 + + def send_nowait(self, item: T_Item) -> DeprecatedAwaitable: + """ + Send an item immediately if it can be done without waiting. + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting + to receive + + """ + if self._closed: + raise ClosedResourceError + if not self._state.open_receive_channels: + raise BrokenResourceError + + if self._state.waiting_receivers: + receive_event, container = self._state.waiting_receivers.popitem(last=False) + container.append(item) + receive_event.set() + elif len(self._state.buffer) < self._state.max_buffer_size: + self._state.buffer.append(item) + else: + raise WouldBlock + + return DeprecatedAwaitable(self.send_nowait) + + async def send(self, item: T_Item) -> None: + await checkpoint() + try: + self.send_nowait(item) + except WouldBlock: + # Wait until there's someone on the receiving end + send_event = Event() + self._state.waiting_senders[send_event] = item + try: + await send_event.wait() + except BaseException: + self._state.waiting_senders.pop(send_event, None) # type: ignore[arg-type] + raise + + if self._state.waiting_senders.pop(send_event, None): # type: ignore[arg-type] + raise BrokenResourceError + + def clone(self) -> "MemoryObjectSendStream[T_Item]": + """ + Create a clone of this send stream. + + Each clone can be closed separately. Only when all clones have been closed will the + sending end of the memory stream be considered closed by the receiving ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectSendStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special case for the + benefit of synchronous callbacks. 
+ + """ + if not self._closed: + self._closed = True + self._state.open_send_channels -= 1 + if self._state.open_send_channels == 0: + receive_events = list(self._state.waiting_receivers.keys()) + self._state.waiting_receivers.clear() + for event in receive_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> "MemoryObjectSendStream[T_Item]": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.close() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/stapled.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/stapled.py new file mode 100644 index 00000000..a71ffb0d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/stapled.py @@ -0,0 +1,138 @@ +from dataclasses import dataclass +from typing import Any, Callable, Generic, List, Mapping, Optional, Sequence, TypeVar + +from ..abc import ( + ByteReceiveStream, + ByteSendStream, + ByteStream, + Listener, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, + TaskGroup, +) + +T_Item = TypeVar("T_Item") +T_Stream = TypeVar("T_Stream") + + +@dataclass(eq=False) +class StapledByteStream(ByteStream): + """ + Combines two byte streams into a single, bidirectional byte stream. + + Extra attributes will be provided from both streams, with the receive stream providing the + values in case of a conflict. + + :param ByteSendStream send_stream: the sending byte stream + :param ByteReceiveStream receive_stream: the receiving byte stream + """ + + send_stream: ByteSendStream + receive_stream: ByteReceiveStream + + async def receive(self, max_bytes: int = 65536) -> bytes: + return await self.receive_stream.receive(max_bytes) + + async def send(self, item: bytes) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): + """ + Combines two object streams into a single, bidirectional object stream. + + Extra attributes will be provided from both streams, with the receive stream providing the + values in case of a conflict. 
+ + :param ObjectSendStream send_stream: the sending object stream + :param ObjectReceiveStream receive_stream: the receiving object stream + """ + + send_stream: ObjectSendStream[T_Item] + receive_stream: ObjectReceiveStream[T_Item] + + async def receive(self) -> T_Item: + return await self.receive_stream.receive() + + async def send(self, item: T_Item) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class MultiListener(Generic[T_Stream], Listener[T_Stream]): + """ + Combines multiple listeners into one, serving connections from all of them at once. + + Any MultiListeners in the given collection of listeners will have their listeners moved into + this one. + + Extra attributes are provided from each listener, with each successive listener overriding any + conflicting attributes from the previous one. + + :param listeners: listeners to serve + :type listeners: Sequence[Listener[T_Stream]] + """ + + listeners: Sequence[Listener[T_Stream]] + + def __post_init__(self) -> None: + listeners: List[Listener[T_Stream]] = [] + for listener in self.listeners: + if isinstance(listener, MultiListener): + listeners.extend(listener.listeners) + del listener.listeners[:] # type: ignore[attr-defined] + else: + listeners.append(listener) + + self.listeners = listeners + + async def serve( + self, handler: Callable[[T_Stream], Any], task_group: Optional[TaskGroup] = None + ) -> None: + from .. import create_task_group + + async with create_task_group() as tg: + for listener in self.listeners: + tg.start_soon(listener.serve, handler, task_group) + + async def aclose(self) -> None: + for listener in self.listeners: + await listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict = {} + for listener in self.listeners: + attributes.update(listener.extra_attributes) + + return attributes diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/text.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/text.py new file mode 100644 index 00000000..ccb683c7 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/text.py @@ -0,0 +1,141 @@ +import codecs +from dataclasses import InitVar, dataclass, field +from typing import Any, Callable, Mapping, Tuple + +from ..abc import ( + AnyByteReceiveStream, + AnyByteSendStream, + AnyByteStream, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, +) + + +@dataclass(eq=False) +class TextReceiveStream(ObjectReceiveStream[str]): + """ + Stream wrapper that decodes bytes to strings using the given encoding. + + Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any completely + received unicode characters as soon as they come in. + + :param transport_stream: any bytes-based receive stream + :param encoding: character encoding to use for decoding bytes to strings (defaults to + ``utf-8``) + :param errors: handling scheme for decoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteReceiveStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _decoder: codecs.IncrementalDecoder = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + decoder_class = codecs.getincrementaldecoder(encoding) + self._decoder = decoder_class(errors=errors) + + async def receive(self) -> str: + while True: + chunk = await self.transport_stream.receive() + decoded = self._decoder.decode(chunk) + if decoded: + return decoded + + async def aclose(self) -> None: + await self.transport_stream.aclose() + self._decoder.reset() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextSendStream(ObjectSendStream[str]): + """ + Sends strings to the wrapped stream as bytes using the given encoding. + + :param AnyByteSendStream transport_stream: any bytes-based send stream + :param str encoding: character encoding to use for encoding strings to bytes (defaults to + ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteSendStream + encoding: InitVar[str] = "utf-8" + errors: str = "strict" + _encoder: Callable[..., Tuple[bytes, int]] = field(init=False) + + def __post_init__(self, encoding: str) -> None: + self._encoder = codecs.getencoder(encoding) + + async def send(self, item: str) -> None: + encoded = self._encoder(item, self.errors)[0] + await self.transport_stream.send(encoded) + + async def aclose(self) -> None: + await self.transport_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextStream(ObjectStream[str]): + """ + A bidirectional stream that decodes bytes to strings on receive and encodes strings to bytes on + send. + + Extra attributes will be provided from both streams, with the receive stream providing the + values in case of a conflict. + + :param AnyByteStream transport_stream: any bytes-based stream + :param str encoding: character encoding to use for encoding/decoding strings to/from bytes + (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _receive_stream: TextReceiveStream = field(init=False) + _send_stream: TextSendStream = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + self._receive_stream = TextReceiveStream( + self.transport_stream, encoding=encoding, errors=errors + ) + self._send_stream = TextSendStream( + self.transport_stream, encoding=encoding, errors=errors + ) + + async def receive(self) -> str: + return await self._receive_stream.receive() + + async def send(self, item: str) -> None: + await self._send_stream.send(item) + + async def send_eof(self) -> None: + await self.transport_stream.send_eof() + + async def aclose(self) -> None: + await self._send_stream.aclose() + await self._receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self._send_stream.extra_attributes, + **self._receive_stream.extra_attributes, + } diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/tls.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/tls.py new file mode 100644 index 00000000..c8e19e2a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/streams/tls.py @@ -0,0 +1,317 @@ +import logging +import re +import ssl +from dataclasses import dataclass +from functools import wraps +from typing import Any, Callable, Dict, List, Mapping, Optional, Tuple, TypeVar, Union + +from .. import ( + BrokenResourceError, + EndOfStream, + aclose_forcefully, + get_cancelled_exc_class, +) +from .._core._typedattr import TypedAttributeSet, typed_attribute +from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup + +T_Retval = TypeVar("T_Retval") +_PCTRTT = Tuple[Tuple[str, str], ...] +_PCTRTTT = Tuple[_PCTRTT, ...] + + +class TLSAttribute(TypedAttributeSet): + """Contains Transport Layer Security related attributes.""" + + #: the selected ALPN protocol + alpn_protocol: Optional[str] = typed_attribute() + #: the channel binding for type ``tls-unique`` + channel_binding_tls_unique: bytes = typed_attribute() + #: the selected cipher + cipher: Tuple[str, str, int] = typed_attribute() + #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` for more + #: information) + peer_certificate: Optional[ + Dict[str, Union[str, _PCTRTTT, _PCTRTT]] + ] = typed_attribute() + #: the peer certificate in binary form + peer_certificate_binary: Optional[bytes] = typed_attribute() + #: ``True`` if this is the server side of the connection + server_side: bool = typed_attribute() + #: ciphers shared between both ends of the TLS connection + shared_ciphers: List[Tuple[str, str, int]] = typed_attribute() + #: the :class:`~ssl.SSLObject` used for encryption + ssl_object: ssl.SSLObject = typed_attribute() + #: ``True`` if this stream does (and expects) a closing TLS handshake when the stream is being + #: closed + standard_compatible: bool = typed_attribute() + #: the TLS protocol version (e.g. ``TLSv1.2``) + tls_version: str = typed_attribute() + + +@dataclass(eq=False) +class TLSStream(ByteStream): + """ + A stream wrapper that encrypts all sent data and decrypts received data. + + This class has no public initializer; use :meth:`wrap` instead. + All extra attributes from :class:`~TLSAttribute` are supported. 
+ + :var AnyByteStream transport_stream: the wrapped stream + + """ + + transport_stream: AnyByteStream + standard_compatible: bool + _ssl_object: ssl.SSLObject + _read_bio: ssl.MemoryBIO + _write_bio: ssl.MemoryBIO + + @classmethod + async def wrap( + cls, + transport_stream: AnyByteStream, + *, + server_side: Optional[bool] = None, + hostname: Optional[str] = None, + ssl_context: Optional[ssl.SSLContext] = None, + standard_compatible: bool = True, + ) -> "TLSStream": + """ + Wrap an existing stream with Transport Layer Security. + + This performs a TLS handshake with the peer. + + :param transport_stream: a bytes-transporting stream to wrap + :param server_side: ``True`` if this is the server side of the connection, ``False`` if + this is the client side (if omitted, will be set to ``False`` if ``hostname`` has been + provided, ``True`` otherwise). Used only to create a default context when an explicit + context has not been provided. + :param hostname: host name of the peer (if host name checking is desired) + :param ssl_context: the SSLContext object to use (if not provided, a secure default will be + created) + :param standard_compatible: if ``False``, skip the closing handshake when closing the + connection, and don't raise an exception if the peer does the same + :raises ~ssl.SSLError: if the TLS handshake fails + + """ + if server_side is None: + server_side = not hostname + + if not ssl_context: + purpose = ( + ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH + ) + ssl_context = ssl.create_default_context(purpose) + + # Re-enable detection of unexpected EOFs if it was disabled by Python + if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"): + ssl_context.options ^= ssl.OP_IGNORE_UNEXPECTED_EOF # type: ignore[attr-defined] + + bio_in = ssl.MemoryBIO() + bio_out = ssl.MemoryBIO() + ssl_object = ssl_context.wrap_bio( + bio_in, bio_out, server_side=server_side, server_hostname=hostname + ) + wrapper = cls( + transport_stream=transport_stream, + standard_compatible=standard_compatible, + _ssl_object=ssl_object, + _read_bio=bio_in, + _write_bio=bio_out, + ) + await wrapper._call_sslobject_method(ssl_object.do_handshake) + return wrapper + + async def _call_sslobject_method( + self, func: Callable[..., T_Retval], *args: object + ) -> T_Retval: + while True: + try: + result = func(*args) + except ssl.SSLWantReadError: + try: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + data = await self.transport_stream.receive() + except EndOfStream: + self._read_bio.write_eof() + except OSError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + else: + self._read_bio.write(data) + except ssl.SSLWantWriteError: + await self.transport_stream.send(self._write_bio.read()) + except ssl.SSLSyscallError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + except ssl.SSLError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + if ( + isinstance(exc, ssl.SSLEOFError) + or "UNEXPECTED_EOF_WHILE_READING" in exc.strerror + ): + if self.standard_compatible: + raise BrokenResourceError from exc + else: + raise EndOfStream from None + + raise + else: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + return result + + async def unwrap(self) -> Tuple[AnyByteStream, bytes]: + """ + Does the TLS closing handshake.
+ + :return: a tuple of (wrapped byte stream, bytes left in the read buffer) + + """ + await self._call_sslobject_method(self._ssl_object.unwrap) + self._read_bio.write_eof() + self._write_bio.write_eof() + return self.transport_stream, self._read_bio.read() + + async def aclose(self) -> None: + if self.standard_compatible: + try: + await self.unwrap() + except BaseException: + await aclose_forcefully(self.transport_stream) + raise + + await self.transport_stream.aclose() + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._call_sslobject_method(self._ssl_object.read, max_bytes) + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + await self._call_sslobject_method(self._ssl_object.write, item) + + async def send_eof(self) -> None: + tls_version = self.extra(TLSAttribute.tls_version) + match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version) + if match: + major, minor = int(match.group(1)), int(match.group(2) or 0) + if (major, minor) < (1, 3): + raise NotImplementedError( + f"send_eof() requires at least TLSv1.3; current " + f"session uses {tls_version}" + ) + + raise NotImplementedError( + "send_eof() has not yet been implemented for TLS streams" + ) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.transport_stream.extra_attributes, + TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol, + TLSAttribute.channel_binding_tls_unique: self._ssl_object.get_channel_binding, + TLSAttribute.cipher: self._ssl_object.cipher, + TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), + TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert( + True + ), + TLSAttribute.server_side: lambda: self._ssl_object.server_side, + TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers(), + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + TLSAttribute.ssl_object: lambda: self._ssl_object, + TLSAttribute.tls_version: self._ssl_object.version, + } + + +@dataclass(eq=False) +class TLSListener(Listener[TLSStream]): + """ + A convenience listener that wraps another listener and auto-negotiates a TLS session on every + accepted connection. + + If the TLS handshake times out or raises an exception, :meth:`handle_handshake_error` is + called to do whatever post-mortem processing is deemed necessary. + + Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute. + + :param Listener listener: the listener to wrap + :param ssl_context: the SSL context object + :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap` + :param handshake_timeout: time limit for the TLS handshake + (passed to :func:`~anyio.fail_after`) + """ + + listener: Listener[Any] + ssl_context: ssl.SSLContext + standard_compatible: bool = True + handshake_timeout: float = 30 + + @staticmethod + async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None: + f""" + Handle an exception raised during the TLS handshake. + + This method does 3 things: + + #. Forcefully closes the original stream + #. Logs the exception (unless it was a cancellation exception) using the ``{__name__}`` + logger + #. 
Reraises the exception if it was a base exception or a cancellation exception + + :param exc: the exception + :param stream: the original stream + + """ + await aclose_forcefully(stream) + + # Log all except cancellation exceptions + if not isinstance(exc, get_cancelled_exc_class()): + logging.getLogger(__name__).exception("Error during TLS handshake") + + # Only reraise base exceptions and cancellation exceptions + if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()): + raise + + async def serve( + self, + handler: Callable[[TLSStream], Any], + task_group: Optional[TaskGroup] = None, + ) -> None: + @wraps(handler) + async def handler_wrapper(stream: AnyByteStream) -> None: + from .. import fail_after + + try: + with fail_after(self.handshake_timeout): + wrapped_stream = await TLSStream.wrap( + stream, + ssl_context=self.ssl_context, + standard_compatible=self.standard_compatible, + ) + except BaseException as exc: + await self.handle_handshake_error(exc, stream) + else: + await handler(wrapped_stream) + + await self.listener.serve(handler_wrapper, task_group) + + async def aclose(self) -> None: + await self.listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + } diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/to_process.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/to_process.py new file mode 100644 index 00000000..39a31730 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/to_process.py @@ -0,0 +1,247 @@ +import os +import pickle +import subprocess +import sys +from collections import deque +from importlib.util import module_from_spec, spec_from_file_location +from typing import Callable, Deque, List, Optional, Set, Tuple, TypeVar, cast + +from ._core._eventloop import current_time, get_asynclib, get_cancelled_exc_class +from ._core._exceptions import BrokenWorkerProcess +from ._core._subprocesses import open_process +from ._core._synchronization import CapacityLimiter +from ._core._tasks import CancelScope, fail_after +from .abc import ByteReceiveStream, ByteSendStream, Process +from .lowlevel import RunVar, checkpoint_if_cancelled +from .streams.buffered import BufferedByteReceiveStream + +WORKER_MAX_IDLE_TIME = 300 # 5 minutes + +T_Retval = TypeVar("T_Retval") +_process_pool_workers: RunVar[Set[Process]] = RunVar("_process_pool_workers") +_process_pool_idle_workers: RunVar[Deque[Tuple[Process, float]]] = RunVar( + "_process_pool_idle_workers" +) +_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter") + + +async def run_sync( + func: Callable[..., T_Retval], + *args: object, + cancellable: bool = False, + limiter: Optional[CapacityLimiter] = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a worker process. + + If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled, + the worker process running it will be abruptly terminated using SIGKILL (or + ``TerminateProcess()`` on Windows). + + :param func: a callable + :param args: positional arguments for the callable + :param cancellable: ``True`` to allow cancellation of the operation while it's running + :param limiter: capacity limiter to use to limit the total number of processes running + (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function.
+ + """ + + async def send_raw_command(pickled_cmd: bytes) -> object: + try: + await stdin.send(pickled_cmd) + response = await buffered.receive_until(b"\n", 50) + status, length = response.split(b" ") + if status not in (b"RETURN", b"EXCEPTION"): + raise RuntimeError( + f"Worker process returned unexpected response: {response!r}" + ) + + pickled_response = await buffered.receive_exactly(int(length)) + except BaseException as exc: + workers.discard(process) + try: + process.kill() + with CancelScope(shield=True): + await process.aclose() + except ProcessLookupError: + pass + + if isinstance(exc, get_cancelled_exc_class()): + raise + else: + raise BrokenWorkerProcess from exc + + retval = pickle.loads(pickled_response) + if status == b"EXCEPTION": + assert isinstance(retval, BaseException) + raise retval + else: + return retval + + # First pickle the request before trying to reserve a worker process + await checkpoint_if_cancelled() + request = pickle.dumps(("run", func, args), protocol=pickle.HIGHEST_PROTOCOL) + + # If this is the first run in this event loop thread, set up the necessary variables + try: + workers = _process_pool_workers.get() + idle_workers = _process_pool_idle_workers.get() + except LookupError: + workers = set() + idle_workers = deque() + _process_pool_workers.set(workers) + _process_pool_idle_workers.set(idle_workers) + get_asynclib().setup_process_pool_exit_at_shutdown(workers) + + async with (limiter or current_default_process_limiter()): + # Pop processes from the pool (starting from the most recently used) until we find one that + # hasn't exited yet + process: Process + while idle_workers: + process, idle_since = idle_workers.pop() + if process.returncode is None: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + + # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME seconds or + # longer + now = current_time() + killed_processes: List[Process] = [] + while idle_workers: + if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: + break + + process, idle_since = idle_workers.popleft() + process.kill() + workers.remove(process) + killed_processes.append(process) + + with CancelScope(shield=True): + for process in killed_processes: + await process.aclose() + + break + + workers.remove(process) + else: + command = [sys.executable, "-u", "-m", __name__] + process = await open_process( + command, stdin=subprocess.PIPE, stdout=subprocess.PIPE + ) + try: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + with fail_after(20): + message = await buffered.receive(6) + + if message != b"READY\n": + raise BrokenWorkerProcess( + f"Worker process returned unexpected response: {message!r}" + ) + + main_module_path = getattr(sys.modules["__main__"], "__file__", None) + pickled = pickle.dumps( + ("init", sys.path, main_module_path), + protocol=pickle.HIGHEST_PROTOCOL, + ) + await send_raw_command(pickled) + except (BrokenWorkerProcess, get_cancelled_exc_class()): + raise + except BaseException as exc: + process.kill() + raise BrokenWorkerProcess( + "Error during worker process initialization" + ) from exc + + workers.add(process) + + with CancelScope(shield=not cancellable): + try: + return cast(T_Retval, await send_raw_command(request)) + finally: + if process in workers: + idle_workers.append((process, current_time())) + + +def current_default_process_limiter() -> CapacityLimiter: + """ + Return the 
capacity limiter that is used by default to limit the number of worker processes. + + :return: a capacity limiter object + + """ + try: + return _default_process_limiter.get() + except LookupError: + limiter = CapacityLimiter(os.cpu_count() or 2) + _default_process_limiter.set(limiter) + return limiter + + +def process_worker() -> None: + # Redirect standard streams to os.devnull so that user code won't interfere with the + # parent-worker communication + stdin = sys.stdin + stdout = sys.stdout + sys.stdin = open(os.devnull) + sys.stdout = open(os.devnull, "w") + + stdout.buffer.write(b"READY\n") + while True: + retval = exception = None + try: + command, *args = pickle.load(stdin.buffer) + except EOFError: + return + except BaseException as exc: + exception = exc + else: + if command == "run": + func, args = args + try: + retval = func(*args) + except BaseException as exc: + exception = exc + elif command == "init": + main_module_path: Optional[str] + sys.path, main_module_path = args + del sys.modules["__main__"] + if main_module_path: + # Load the parent's main module but as __mp_main__ instead of __main__ + # (like multiprocessing does) to avoid infinite recursion + try: + spec = spec_from_file_location("__mp_main__", main_module_path) + if spec and spec.loader: + main = module_from_spec(spec) + spec.loader.exec_module(main) + sys.modules["__main__"] = main + except BaseException as exc: + exception = exc + + try: + if exception is not None: + status = b"EXCEPTION" + pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL) + else: + status = b"RETURN" + pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL) + except BaseException as exc: + exception = exc + status = b"EXCEPTION" + pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL) + + stdout.buffer.write(b"%s %d\n" % (status, len(pickled))) + stdout.buffer.write(pickled) + + # Respect SIGTERM + if isinstance(exception, SystemExit): + raise exception + + +if __name__ == "__main__": + process_worker() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/to_thread.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/to_thread.py new file mode 100644 index 00000000..a2fd42f4 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/anyio/to_thread.py @@ -0,0 +1,65 @@ +from typing import Callable, Optional, TypeVar +from warnings import warn + +from ._core._eventloop import get_asynclib +from .abc import CapacityLimiter + +T_Retval = TypeVar("T_Retval") + + +async def run_sync( + func: Callable[..., T_Retval], + *args: object, + cancellable: bool = False, + limiter: Optional[CapacityLimiter] = None +) -> T_Retval: + """ + Call the given function with the given arguments in a worker thread. + + If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled, + the thread will still run its course but its return value (or any raised exception) will be + ignored. + + :param func: a callable + :param args: positional arguments for the callable + :param cancellable: ``True`` to allow cancellation of the operation + :param limiter: capacity limiter to use to limit the total number of threads running + (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function.
+ + """ + return await get_asynclib().run_sync_in_worker_thread( + func, *args, cancellable=cancellable, limiter=limiter + ) + + +async def run_sync_in_worker_thread( + func: Callable[..., T_Retval], + *args: object, + cancellable: bool = False, + limiter: Optional[CapacityLimiter] = None +) -> T_Retval: + warn( + "run_sync_in_worker_thread() has been deprecated, use anyio.to_thread.run_sync() instead", + DeprecationWarning, + ) + return await run_sync(func, *args, cancellable=cancellable, limiter=limiter) + + +def current_default_thread_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of concurrent threads. + + :return: a capacity limiter object + + """ + return get_asynclib().current_default_thread_limiter() + + +def current_default_worker_thread_limiter() -> CapacityLimiter: + warn( + "current_default_worker_thread_limiter() has been deprecated, " + "use anyio.to_thread.current_default_thread_limiter() instead", + DeprecationWarning, + ) + return current_default_thread_limiter() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/LICENSE.rst b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/LICENSE.rst new file mode 100644 index 00000000..d12a8491 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2014 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
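A minimal usage sketch of the ``anyio.to_thread.run_sync`` API vendored above (the blocking helper and the file name here are illustrative assumptions, not part of the vendored distribution):

.. code-block:: python

    import anyio
    from anyio import to_thread

    def read_blocking(path: str) -> bytes:
        # Plain blocking I/O; calling this directly in a coroutine would stall the event loop
        with open(path, "rb") as f:
            return f.read()

    async def main() -> None:
        # Offload the blocking call to a worker thread; the default capacity
        # limiter bounds how many worker threads may run at once
        data = await to_thread.run_sync(read_blocking, "example.bin")
        print(f"read {len(data)} bytes")

    anyio.run(main)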
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/METADATA new file mode 100644 index 00000000..8e5dc1e0 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/METADATA @@ -0,0 +1,111 @@ +Metadata-Version: 2.1 +Name: click +Version: 8.1.3 +Summary: Composable command line interface toolkit +Home-page: https://palletsprojects.com/p/click/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://click.palletsprojects.com/ +Project-URL: Changes, https://click.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/click/ +Project-URL: Issue Tracker, https://github.com/pallets/click/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: colorama ; platform_system == "Windows" +Requires-Dist: importlib-metadata ; python_version < "3.8" + +\$ click\_ +========== + +Click is a Python package for creating beautiful command line interfaces +in a composable way with as little code as necessary. It's the "Command +Line Interface Creation Kit". It's highly configurable but comes with +sensible defaults out of the box. + +It aims to make the process of writing command line tools quick and fun +while also preventing any frustration caused by the inability to +implement an intended CLI API. + +Click in three points: + +- Arbitrary nesting of commands +- Automatic help page generation +- Supports lazy loading of subcommands at runtime + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U click + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. code-block:: python + + import click + + @click.command() + @click.option("--count", default=1, help="Number of greetings.") + @click.option("--name", prompt="Your name", help="The person to greet.") + def hello(count, name): + """Simple program that greets NAME for a total of COUNT times.""" + for _ in range(count): + click.echo(f"Hello, {name}!") + + if __name__ == '__main__': + hello() + +.. code-block:: text + + $ python hello.py --count=3 + Your name: Click + Hello, Click! + Hello, Click! + Hello, Click! + + +Donate +------ + +The Pallets organization develops and supports Click and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. 
_please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://click.palletsprojects.com/ +- Changes: https://click.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/click/ +- Source Code: https://github.com/pallets/click +- Issue Tracker: https://github.com/pallets/click/issues +- Website: https://palletsprojects.com/p/click +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets + + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/RECORD new file mode 100644 index 00000000..d5b2ceb8 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/RECORD @@ -0,0 +1,39 @@ +click-8.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +click-8.1.3.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 +click-8.1.3.dist-info/METADATA,sha256=tFJIX5lOjx7c5LjZbdTPFVDJSgyv9F74XY0XCPp_gnc,3247 +click-8.1.3.dist-info/RECORD,, +click-8.1.3.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +click-8.1.3.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 +click/__init__.py,sha256=rQBLutqg-z6m8nOzivIfigDn_emijB_dKv9BZ2FNi5s,3138 +click/__pycache__/__init__.cpython-37.pyc,, +click/__pycache__/_compat.cpython-37.pyc,, +click/__pycache__/_termui_impl.cpython-37.pyc,, +click/__pycache__/_textwrap.cpython-37.pyc,, +click/__pycache__/_winconsole.cpython-37.pyc,, +click/__pycache__/core.cpython-37.pyc,, +click/__pycache__/decorators.cpython-37.pyc,, +click/__pycache__/exceptions.cpython-37.pyc,, +click/__pycache__/formatting.cpython-37.pyc,, +click/__pycache__/globals.cpython-37.pyc,, +click/__pycache__/parser.cpython-37.pyc,, +click/__pycache__/shell_completion.cpython-37.pyc,, +click/__pycache__/termui.cpython-37.pyc,, +click/__pycache__/testing.cpython-37.pyc,, +click/__pycache__/types.cpython-37.pyc,, +click/__pycache__/utils.cpython-37.pyc,, +click/_compat.py,sha256=JIHLYs7Jzz4KT9t-ds4o4jBzLjnwCiJQKqur-5iwCKI,18810 +click/_termui_impl.py,sha256=qK6Cfy4mRFxvxE8dya8RBhLpSC8HjF-lvBc6aNrPdwg,23451 +click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353 +click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860 +click/core.py,sha256=mz87bYEKzIoNYEa56BFAiOJnvt1Y0L-i7wD4_ZecieE,112782 +click/decorators.py,sha256=yo3zvzgUm5q7h5CXjyV6q3h_PJAiUaem178zXwdWUFI,16350 +click/exceptions.py,sha256=7gDaLGuFZBeCNwY9ERMsF2-Z3R9Fvq09Zc6IZSKjseo,9167 +click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706 +click/globals.py,sha256=TP-qM88STzc7f127h35TD_v920FgfOD2EwzqA0oE8XU,1961 +click/parser.py,sha256=cAEt1uQR8gq3-S9ysqbVU-fdAZNvilxw4ReJ_T1OQMk,19044 +click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +click/shell_completion.py,sha256=qOp_BeC9esEOSZKyu5G7RIxEUaLsXUX-mTb7hB1r4QY,18018 +click/termui.py,sha256=ACBQVOvFCTSqtD5VREeCAdRtlHd-Imla-Lte4wSfMjA,28355 +click/testing.py,sha256=ptpMYgRY7dVfE3UDgkgwayu9ePw98sQI3D7zZXiCpj4,16063 +click/types.py,sha256=rEb1aZSQKq3ciCMmjpG2Uva9vk498XRL7ThrcK2GRss,35805 +click/utils.py,sha256=33D6E7poH_nrKB-xr-UyDEXnxOcCiQqxuRLtrqeVv6o,18682 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/WHEEL b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- 
/dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/top_level.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/top_level.txt new file mode 100644 index 00000000..dca9a909 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click-8.1.3.dist-info/top_level.txt @@ -0,0 +1 @@ +click diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/__init__.py new file mode 100644 index 00000000..e3ef423b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/__init__.py @@ -0,0 +1,73 @@ +""" +Click is a simple Python module inspired by the stdlib optparse to make +writing command line scripts fun. Unlike other modules, it's based +around a simple API that does not come with too much magic and is +composable. +""" +from .core import Argument as Argument +from .core import BaseCommand as BaseCommand +from .core import Command as Command +from .core import CommandCollection as CommandCollection +from .core import Context as Context +from .core import Group as Group +from .core import MultiCommand as MultiCommand +from .core import Option as Option +from .core import Parameter as Parameter +from .decorators import argument as argument +from .decorators import command as command +from .decorators import confirmation_option as confirmation_option +from .decorators import group as group +from .decorators import help_option as help_option +from .decorators import make_pass_decorator as make_pass_decorator +from .decorators import option as option +from .decorators import pass_context as pass_context +from .decorators import pass_obj as pass_obj +from .decorators import password_option as password_option +from .decorators import version_option as version_option +from .exceptions import Abort as Abort +from .exceptions import BadArgumentUsage as BadArgumentUsage +from .exceptions import BadOptionUsage as BadOptionUsage +from .exceptions import BadParameter as BadParameter +from .exceptions import ClickException as ClickException +from .exceptions import FileError as FileError +from .exceptions import MissingParameter as MissingParameter +from .exceptions import NoSuchOption as NoSuchOption +from .exceptions import UsageError as UsageError +from .formatting import HelpFormatter as HelpFormatter +from .formatting import wrap_text as wrap_text +from .globals import get_current_context as get_current_context +from .parser import OptionParser as OptionParser +from .termui import clear as clear +from .termui import confirm as confirm +from .termui import echo_via_pager as echo_via_pager +from .termui import edit as edit +from .termui import getchar as getchar +from .termui import launch as launch +from .termui import pause as pause +from .termui import progressbar as progressbar +from .termui import prompt as prompt +from .termui import secho as secho +from .termui import style as style +from .termui import unstyle as unstyle +from .types import BOOL as BOOL +from .types import Choice as Choice +from .types import DateTime as DateTime +from .types import File as File +from .types import FLOAT as FLOAT +from .types import FloatRange as FloatRange +from .types import INT as INT 
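The redundant-looking `from .core import Argument as Argument` spelling above is the explicit re-export idiom: the `X as X` alias marks each name as intentionally public, so type checkers treat the flat `click.*` namespace as API rather than an accident. The import list continues below; as a quick sketch (assuming the vendored package imports cleanly), the flat names and the submodule attributes end up sharing the same objects:

.. code-block:: python

    import click

    # The from-imports in click/__init__.py execute each submodule, so
    # the flat names and the submodule attributes are identical objects.
    assert click.echo is click.utils.echo
    assert click.Command is click.core.Command
    print(click.__version__)  # "8.1.3" in this vendored copy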
+from .types import IntRange as IntRange +from .types import ParamType as ParamType +from .types import Path as Path +from .types import STRING as STRING +from .types import Tuple as Tuple +from .types import UNPROCESSED as UNPROCESSED +from .types import UUID as UUID +from .utils import echo as echo +from .utils import format_filename as format_filename +from .utils import get_app_dir as get_app_dir +from .utils import get_binary_stream as get_binary_stream +from .utils import get_text_stream as get_text_stream +from .utils import open_file as open_file + +__version__ = "8.1.3" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/_compat.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/_compat.py new file mode 100644 index 00000000..766d286b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/_compat.py @@ -0,0 +1,626 @@ +import codecs +import io +import os +import re +import sys +import typing as t +from weakref import WeakKeyDictionary + +CYGWIN = sys.platform.startswith("cygwin") +MSYS2 = sys.platform.startswith("win") and ("GCC" in sys.version) +# Determine local App Engine environment, per Google's own suggestion +APP_ENGINE = "APPENGINE_RUNTIME" in os.environ and "Development/" in os.environ.get( + "SERVER_SOFTWARE", "" +) +WIN = sys.platform.startswith("win") and not APP_ENGINE and not MSYS2 +auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None +_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") + + +def get_filesystem_encoding() -> str: + return sys.getfilesystemencoding() or sys.getdefaultencoding() + + +def _make_text_stream( + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if encoding is None: + encoding = get_best_encoding(stream) + if errors is None: + errors = "replace" + return _NonClosingTextIOWrapper( + stream, + encoding, + errors, + line_buffering=True, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def is_ascii_encoding(encoding: str) -> bool: + """Checks if a given encoding is ascii.""" + try: + return codecs.lookup(encoding).name == "ascii" + except LookupError: + return False + + +def get_best_encoding(stream: t.IO) -> str: + """Returns the default stream encoding if not found.""" + rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() + if is_ascii_encoding(rv): + return "utf-8" + return rv + + +class _NonClosingTextIOWrapper(io.TextIOWrapper): + def __init__( + self, + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, + **extra: t.Any, + ) -> None: + self._stream = stream = t.cast( + t.BinaryIO, _FixupStream(stream, force_readable, force_writable) + ) + super().__init__(stream, encoding, errors, **extra) + + def __del__(self) -> None: + try: + self.detach() + except Exception: + pass + + def isatty(self) -> bool: + # https://bitbucket.org/pypy/pypy/issue/1803 + return self._stream.isatty() + + +class _FixupStream: + """The new io interface needs more from streams than streams + traditionally implement. As such, this fix-up code is necessary in + some circumstances. + + The forcing of readable and writable flags are there because some tools + put badly patched objects on sys (one such offender are certain version + of jupyter notebook). 
+ """ + + def __init__( + self, + stream: t.BinaryIO, + force_readable: bool = False, + force_writable: bool = False, + ): + self._stream = stream + self._force_readable = force_readable + self._force_writable = force_writable + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._stream, name) + + def read1(self, size: int) -> bytes: + f = getattr(self._stream, "read1", None) + + if f is not None: + return t.cast(bytes, f(size)) + + return self._stream.read(size) + + def readable(self) -> bool: + if self._force_readable: + return True + x = getattr(self._stream, "readable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.read(0) + except Exception: + return False + return True + + def writable(self) -> bool: + if self._force_writable: + return True + x = getattr(self._stream, "writable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.write("") # type: ignore + except Exception: + try: + self._stream.write(b"") + except Exception: + return False + return True + + def seekable(self) -> bool: + x = getattr(self._stream, "seekable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.seek(self._stream.tell()) + except Exception: + return False + return True + + +def _is_binary_reader(stream: t.IO, default: bool = False) -> bool: + try: + return isinstance(stream.read(0), bytes) + except Exception: + return default + # This happens in some cases where the stream was already + # closed. In this case, we assume the default. + + +def _is_binary_writer(stream: t.IO, default: bool = False) -> bool: + try: + stream.write(b"") + except Exception: + try: + stream.write("") + return False + except Exception: + pass + return default + return True + + +def _find_binary_reader(stream: t.IO) -> t.Optional[t.BinaryIO]: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_reader(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_reader(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _find_binary_writer(stream: t.IO) -> t.Optional[t.BinaryIO]: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_writer(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_writer(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _stream_is_misconfigured(stream: t.TextIO) -> bool: + """A stream is misconfigured if its encoding is ASCII.""" + # If the stream does not have an encoding set, we assume it's set + # to ASCII. This appears to happen in certain unittest + # environments. It's not quite clear what the correct behavior is + # but this at least will force Click to recover somehow. 
+ return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") + + +def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool: + """A stream attribute is compatible if it is equal to the + desired value or the desired value is unset and the attribute + has a value. + """ + stream_value = getattr(stream, attr, None) + return stream_value == value or (value is None and stream_value is not None) + + +def _is_compatible_text_stream( + stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> bool: + """Check if a stream's encoding and errors attributes are + compatible with the desired values. + """ + return _is_compat_stream_attr( + stream, "encoding", encoding + ) and _is_compat_stream_attr(stream, "errors", errors) + + +def _force_correct_text_stream( + text_stream: t.IO, + encoding: t.Optional[str], + errors: t.Optional[str], + is_binary: t.Callable[[t.IO, bool], bool], + find_binary: t.Callable[[t.IO], t.Optional[t.BinaryIO]], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if is_binary(text_stream, False): + binary_reader = t.cast(t.BinaryIO, text_stream) + else: + text_stream = t.cast(t.TextIO, text_stream) + # If the stream looks compatible, and won't default to a + # misconfigured ascii encoding, return it as-is. + if _is_compatible_text_stream(text_stream, encoding, errors) and not ( + encoding is None and _stream_is_misconfigured(text_stream) + ): + return text_stream + + # Otherwise, get the underlying binary reader. + possible_binary_reader = find_binary(text_stream) + + # If that's not possible, silently use the original reader + # and get mojibake instead of exceptions. + if possible_binary_reader is None: + return text_stream + + binary_reader = possible_binary_reader + + # Default errors to replace instead of strict in order to get + # something that works. + if errors is None: + errors = "replace" + + # Wrap the binary stream in a text stream with the correct + # encoding parameters. 
+ return _make_text_stream( + binary_reader, + encoding, + errors, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def _force_correct_text_reader( + text_reader: t.IO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_reader, + encoding, + errors, + _is_binary_reader, + _find_binary_reader, + force_readable=force_readable, + ) + + +def _force_correct_text_writer( + text_writer: t.IO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_writable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_writer, + encoding, + errors, + _is_binary_writer, + _find_binary_writer, + force_writable=force_writable, + ) + + +def get_binary_stdin() -> t.BinaryIO: + reader = _find_binary_reader(sys.stdin) + if reader is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdin.") + return reader + + +def get_binary_stdout() -> t.BinaryIO: + writer = _find_binary_writer(sys.stdout) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdout.") + return writer + + +def get_binary_stderr() -> t.BinaryIO: + writer = _find_binary_writer(sys.stderr) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stderr.") + return writer + + +def get_text_stdin( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) + + +def get_text_stdout( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) + + +def get_text_stderr( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) + + +def _wrap_io_open( + file: t.Union[str, os.PathLike, int], + mode: str, + encoding: t.Optional[str], + errors: t.Optional[str], +) -> t.IO: + """Handles not passing ``encoding`` and ``errors`` in binary mode.""" + if "b" in mode: + return open(file, mode) + + return open(file, mode, encoding=encoding, errors=errors) + + +def open_stream( + filename: str, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, +) -> t.Tuple[t.IO, bool]: + binary = "b" in mode + + # Standard streams first. These are simple because they ignore the + # atomic flag. Use fsdecode to handle Path("-"). + if os.fsdecode(filename) == "-": + if any(m in mode for m in ["w", "a", "x"]): + if binary: + return get_binary_stdout(), False + return get_text_stdout(encoding=encoding, errors=errors), False + if binary: + return get_binary_stdin(), False + return get_text_stdin(encoding=encoding, errors=errors), False + + # Non-atomic writes directly go out through the regular open functions. 
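`open_stream` treats `"-"` as the standard streams, and the atomic branch that follows writes through a hidden temp file that `os.replace` swaps into place on close. Its public wrapper is `click.open_file`; a usage sketch (`demo.txt` is a hypothetical path):

.. code-block:: python

    import click

    # "-" selects stdout here because of the write mode; a read mode
    # would select stdin instead.
    with click.open_file("-", "w") as f:
        f.write("goes to stdout\n")

    # atomic=True: readers of demo.txt never observe a half-written file,
    # because the content lands via the temp-file-plus-replace dance below.
    with click.open_file("demo.txt", "w", atomic=True) as f:
        f.write("swapped into place on close\n")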
+ if not atomic: + return _wrap_io_open(filename, mode, encoding, errors), True + + # Some usability stuff for atomic writes + if "a" in mode: + raise ValueError( + "Appending to an existing file is not supported, because that" + " would involve an expensive `copy`-operation to a temporary" + " file. Open the file in normal `w`-mode and copy explicitly" + " if that's what you're after." + ) + if "x" in mode: + raise ValueError("Use the `overwrite`-parameter instead.") + if "w" not in mode: + raise ValueError("Atomic writes only make sense with `w`-mode.") + + # Atomic writes are more complicated. They work by opening a file + # as a proxy in the same folder and then using the fdopen + # functionality to wrap it in a Python file. Then we wrap it in an + # atomic file that moves the file over on close. + import errno + import random + + try: + perm: t.Optional[int] = os.stat(filename).st_mode + except OSError: + perm = None + + flags = os.O_RDWR | os.O_CREAT | os.O_EXCL + + if binary: + flags |= getattr(os, "O_BINARY", 0) + + while True: + tmp_filename = os.path.join( + os.path.dirname(filename), + f".__atomic-write{random.randrange(1 << 32):08x}", + ) + try: + fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) + break + except OSError as e: + if e.errno == errno.EEXIST or ( + os.name == "nt" + and e.errno == errno.EACCES + and os.path.isdir(e.filename) + and os.access(e.filename, os.W_OK) + ): + continue + raise + + if perm is not None: + os.chmod(tmp_filename, perm) # in case perm includes bits in umask + + f = _wrap_io_open(fd, mode, encoding, errors) + af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) + return t.cast(t.IO, af), True + + +class _AtomicFile: + def __init__(self, f: t.IO, tmp_filename: str, real_filename: str) -> None: + self._f = f + self._tmp_filename = tmp_filename + self._real_filename = real_filename + self.closed = False + + @property + def name(self) -> str: + return self._real_filename + + def close(self, delete: bool = False) -> None: + if self.closed: + return + self._f.close() + os.replace(self._tmp_filename, self._real_filename) + self.closed = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._f, name) + + def __enter__(self) -> "_AtomicFile": + return self + + def __exit__(self, exc_type, exc_value, tb): # type: ignore + self.close(delete=exc_type is not None) + + def __repr__(self) -> str: + return repr(self._f) + + +def strip_ansi(value: str) -> str: + return _ansi_re.sub("", value) + + +def _is_jupyter_kernel_output(stream: t.IO) -> bool: + while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): + stream = stream._stream + + return stream.__class__.__module__.startswith("ipykernel.") + + +def should_strip_ansi( + stream: t.Optional[t.IO] = None, color: t.Optional[bool] = None +) -> bool: + if color is None: + if stream is None: + stream = sys.stdin + return not isatty(stream) and not _is_jupyter_kernel_output(stream) + return not color + + +# On Windows, wrap the output streams with colorama to support ANSI +# color codes. 
+# NOTE: double check is needed so mypy does not analyze this on Linux +if sys.platform.startswith("win") and WIN: + from ._winconsole import _get_windows_console_stream + + def _get_argv_encoding() -> str: + import locale + + return locale.getpreferredencoding() + + _ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def auto_wrap_for_ansi( + stream: t.TextIO, color: t.Optional[bool] = None + ) -> t.TextIO: + """Support ANSI color and style codes on Windows by wrapping a + stream with colorama. + """ + try: + cached = _ansi_stream_wrappers.get(stream) + except Exception: + cached = None + + if cached is not None: + return cached + + import colorama + + strip = should_strip_ansi(stream, color) + ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) + rv = t.cast(t.TextIO, ansi_wrapper.stream) + _write = rv.write + + def _safe_write(s): + try: + return _write(s) + except BaseException: + ansi_wrapper.reset_all() + raise + + rv.write = _safe_write + + try: + _ansi_stream_wrappers[stream] = rv + except Exception: + pass + + return rv + +else: + + def _get_argv_encoding() -> str: + return getattr(sys.stdin, "encoding", None) or get_filesystem_encoding() + + def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] + ) -> t.Optional[t.TextIO]: + return None + + +def term_len(x: str) -> int: + return len(strip_ansi(x)) + + +def isatty(stream: t.IO) -> bool: + try: + return stream.isatty() + except Exception: + return False + + +def _make_cached_stream_func( + src_func: t.Callable[[], t.TextIO], wrapper_func: t.Callable[[], t.TextIO] +) -> t.Callable[[], t.TextIO]: + cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def func() -> t.TextIO: + stream = src_func() + try: + rv = cache.get(stream) + except Exception: + rv = None + if rv is not None: + return rv + rv = wrapper_func() + try: + cache[stream] = rv + except Exception: + pass + return rv + + return func + + +_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) +_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) +_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) + + +binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = { + "stdin": get_binary_stdin, + "stdout": get_binary_stdout, + "stderr": get_binary_stderr, +} + +text_streams: t.Mapping[ + str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO] +] = { + "stdin": get_text_stdin, + "stdout": get_text_stdout, + "stderr": get_text_stderr, +} diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/_termui_impl.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/_termui_impl.py new file mode 100644 index 00000000..4b979bcc --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/_termui_impl.py @@ -0,0 +1,717 @@ +""" +This module contains implementations for the termui module. To keep the +import time of Click down, some infrequently used functionality is +placed in this module and only imported as needed. 
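The `binary_streams` and `text_streams` maps defined just above back Click's public stream accessors; a minimal sketch, assuming the vendored click imports cleanly:

.. code-block:: python

    import click

    out_b = click.get_binary_stream("stdout")  # raw bytes, no rewrapping
    out_t = click.get_text_stream("stdout")    # rewrapped if misconfigured
    out_b.write(b"bytes\n")
    out_b.flush()
    out_t.write("text in a sane encoding\n")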
+""" +import contextlib +import math +import os +import sys +import time +import typing as t +from gettext import gettext as _ + +from ._compat import _default_text_stdout +from ._compat import CYGWIN +from ._compat import get_best_encoding +from ._compat import isatty +from ._compat import open_stream +from ._compat import strip_ansi +from ._compat import term_len +from ._compat import WIN +from .exceptions import ClickException +from .utils import echo + +V = t.TypeVar("V") + +if os.name == "nt": + BEFORE_BAR = "\r" + AFTER_BAR = "\n" +else: + BEFORE_BAR = "\r\033[?25l" + AFTER_BAR = "\033[?25h\n" + + +class ProgressBar(t.Generic[V]): + def __init__( + self, + iterable: t.Optional[t.Iterable[V]], + length: t.Optional[int] = None, + fill_char: str = "#", + empty_char: str = " ", + bar_template: str = "%(bar)s", + info_sep: str = " ", + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + label: t.Optional[str] = None, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, + width: int = 30, + ) -> None: + self.fill_char = fill_char + self.empty_char = empty_char + self.bar_template = bar_template + self.info_sep = info_sep + self.show_eta = show_eta + self.show_percent = show_percent + self.show_pos = show_pos + self.item_show_func = item_show_func + self.label = label or "" + if file is None: + file = _default_text_stdout() + self.file = file + self.color = color + self.update_min_steps = update_min_steps + self._completed_intervals = 0 + self.width = width + self.autowidth = width == 0 + + if length is None: + from operator import length_hint + + length = length_hint(iterable, -1) + + if length == -1: + length = None + if iterable is None: + if length is None: + raise TypeError("iterable or length is required") + iterable = t.cast(t.Iterable[V], range(length)) + self.iter = iter(iterable) + self.length = length + self.pos = 0 + self.avg: t.List[float] = [] + self.start = self.last_eta = time.time() + self.eta_known = False + self.finished = False + self.max_width: t.Optional[int] = None + self.entered = False + self.current_item: t.Optional[V] = None + self.is_hidden = not isatty(self.file) + self._last_line: t.Optional[str] = None + + def __enter__(self) -> "ProgressBar": + self.entered = True + self.render_progress() + return self + + def __exit__(self, exc_type, exc_value, tb): # type: ignore + self.render_finish() + + def __iter__(self) -> t.Iterator[V]: + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + self.render_progress() + return self.generator() + + def __next__(self) -> V: + # Iteration is defined in terms of a generator function, + # returned by iter(self); use that to define next(). This works + # because `self.iter` is an iterable consumed by that generator, + # so it is re-entry safe. Calling `next(self.generator())` + # twice works and does "what you want". 
+ return next(iter(self)) + + def render_finish(self) -> None: + if self.is_hidden: + return + self.file.write(AFTER_BAR) + self.file.flush() + + @property + def pct(self) -> float: + if self.finished: + return 1.0 + return min(self.pos / (float(self.length or 1) or 1), 1.0) + + @property + def time_per_iteration(self) -> float: + if not self.avg: + return 0.0 + return sum(self.avg) / float(len(self.avg)) + + @property + def eta(self) -> float: + if self.length is not None and not self.finished: + return self.time_per_iteration * (self.length - self.pos) + return 0.0 + + def format_eta(self) -> str: + if self.eta_known: + t = int(self.eta) + seconds = t % 60 + t //= 60 + minutes = t % 60 + t //= 60 + hours = t % 24 + t //= 24 + if t > 0: + return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" + else: + return f"{hours:02}:{minutes:02}:{seconds:02}" + return "" + + def format_pos(self) -> str: + pos = str(self.pos) + if self.length is not None: + pos += f"/{self.length}" + return pos + + def format_pct(self) -> str: + return f"{int(self.pct * 100): 4}%"[1:] + + def format_bar(self) -> str: + if self.length is not None: + bar_length = int(self.pct * self.width) + bar = self.fill_char * bar_length + bar += self.empty_char * (self.width - bar_length) + elif self.finished: + bar = self.fill_char * self.width + else: + chars = list(self.empty_char * (self.width or 1)) + if self.time_per_iteration != 0: + chars[ + int( + (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) + * self.width + ) + ] = self.fill_char + bar = "".join(chars) + return bar + + def format_progress_line(self) -> str: + show_percent = self.show_percent + + info_bits = [] + if self.length is not None and show_percent is None: + show_percent = not self.show_pos + + if self.show_pos: + info_bits.append(self.format_pos()) + if show_percent: + info_bits.append(self.format_pct()) + if self.show_eta and self.eta_known and not self.finished: + info_bits.append(self.format_eta()) + if self.item_show_func is not None: + item_info = self.item_show_func(self.current_item) + if item_info is not None: + info_bits.append(item_info) + + return ( + self.bar_template + % { + "label": self.label, + "bar": self.format_bar(), + "info": self.info_sep.join(info_bits), + } + ).rstrip() + + def render_progress(self) -> None: + import shutil + + if self.is_hidden: + # Only output the label as it changes if the output is not a + # TTY. Use file=stderr if you expect to be piping stdout. + if self._last_line != self.label: + self._last_line = self.label + echo(self.label, file=self.file, color=self.color) + + return + + buf = [] + # Update width in case the terminal has been resized + if self.autowidth: + old_width = self.width + self.width = 0 + clutter_length = term_len(self.format_progress_line()) + new_width = max(0, shutil.get_terminal_size().columns - clutter_length) + if new_width < old_width: + buf.append(BEFORE_BAR) + buf.append(" " * self.max_width) # type: ignore + self.max_width = new_width + self.width = new_width + + clear_width = self.width + if self.max_width is not None: + clear_width = self.max_width + + buf.append(BEFORE_BAR) + line = self.format_progress_line() + line_len = term_len(line) + if self.max_width is None or self.max_width < line_len: + self.max_width = line_len + + buf.append(line) + buf.append(" " * (clear_width - line_len)) + line = "".join(buf) + # Render the line only if it changed. 
+ + if line != self._last_line: + self._last_line = line + echo(line, file=self.file, color=self.color, nl=False) + self.file.flush() + + def make_step(self, n_steps: int) -> None: + self.pos += n_steps + if self.length is not None and self.pos >= self.length: + self.finished = True + + if (time.time() - self.last_eta) < 1.0: + return + + self.last_eta = time.time() + + # self.avg is a rolling list of length <= 7 of steps where steps are + # defined as time elapsed divided by the total progress through + # self.length. + if self.pos: + step = (time.time() - self.start) / self.pos + else: + step = time.time() - self.start + + self.avg = self.avg[-6:] + [step] + + self.eta_known = self.length is not None + + def update(self, n_steps: int, current_item: t.Optional[V] = None) -> None: + """Update the progress bar by advancing a specified number of + steps, and optionally set the ``current_item`` for this new + position. + + :param n_steps: Number of steps to advance. + :param current_item: Optional item to set as ``current_item`` + for the updated position. + + .. versionchanged:: 8.0 + Added the ``current_item`` optional parameter. + + .. versionchanged:: 8.0 + Only render when the number of steps meets the + ``update_min_steps`` threshold. + """ + if current_item is not None: + self.current_item = current_item + + self._completed_intervals += n_steps + + if self._completed_intervals >= self.update_min_steps: + self.make_step(self._completed_intervals) + self.render_progress() + self._completed_intervals = 0 + + def finish(self) -> None: + self.eta_known = False + self.current_item = None + self.finished = True + + def generator(self) -> t.Iterator[V]: + """Return a generator which yields the items added to the bar + during construction, and updates the progress bar *after* the + yielded block returns. + """ + # WARNING: the iterator interface for `ProgressBar` relies on + # this and only works because this is a simple generator which + # doesn't create or manage additional state. If this function + # changes, the impact should be evaluated both against + # `iter(bar)` and `next(bar)`. `next()` in particular may call + # `self.generator()` repeatedly, and this must remain safe in + # order for that interface to work. + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + + if self.is_hidden: + yield from self.iter + else: + for rv in self.iter: + self.current_item = rv + + # This allows show_item_func to be updated before the + # item is processed. Only trigger at the beginning of + # the update interval. 
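When the work is not a plain for-loop, the `update()` method documented above drives the bar manually, and `update_min_steps` batches repaints through the `_completed_intervals` counter. A sketch:

.. code-block:: python

    import click

    # length=1000 units of work, repainted at most once per 50 completed
    # units thanks to the _completed_intervals accounting above.
    with click.progressbar(length=1000, update_min_steps=50) as bar:
        for _ in range(100):
            # ... process one 10-unit chunk of work here ...
            bar.update(10)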
+ if self._completed_intervals == 0: + self.render_progress() + + yield rv + self.update(1) + + self.finish() + self.render_progress() + + +def pager(generator: t.Iterable[str], color: t.Optional[bool] = None) -> None: + """Decide what method to use for paging through text.""" + stdout = _default_text_stdout() + if not isatty(sys.stdin) or not isatty(stdout): + return _nullpager(stdout, generator, color) + pager_cmd = (os.environ.get("PAGER", None) or "").strip() + if pager_cmd: + if WIN: + return _tempfilepager(generator, pager_cmd, color) + return _pipepager(generator, pager_cmd, color) + if os.environ.get("TERM") in ("dumb", "emacs"): + return _nullpager(stdout, generator, color) + if WIN or sys.platform.startswith("os2"): + return _tempfilepager(generator, "more <", color) + if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0: + return _pipepager(generator, "less", color) + + import tempfile + + fd, filename = tempfile.mkstemp() + os.close(fd) + try: + if hasattr(os, "system") and os.system(f'more "{filename}"') == 0: + return _pipepager(generator, "more", color) + return _nullpager(stdout, generator, color) + finally: + os.unlink(filename) + + +def _pipepager(generator: t.Iterable[str], cmd: str, color: t.Optional[bool]) -> None: + """Page through text by feeding it to another program. Invoking a + pager through this might support colors. + """ + import subprocess + + env = dict(os.environ) + + # If we're piping to less we might support colors under the + # condition that + cmd_detail = cmd.rsplit("/", 1)[-1].split() + if color is None and cmd_detail[0] == "less": + less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}" + if not less_flags: + env["LESS"] = "-R" + color = True + elif "r" in less_flags or "R" in less_flags: + color = True + + c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) + stdin = t.cast(t.BinaryIO, c.stdin) + encoding = get_best_encoding(stdin) + try: + for text in generator: + if not color: + text = strip_ansi(text) + + stdin.write(text.encode(encoding, "replace")) + except (OSError, KeyboardInterrupt): + pass + else: + stdin.close() + + # Less doesn't respect ^C, but catches it for its own UI purposes (aborting + # search or other commands inside less). + # + # That means when the user hits ^C, the parent process (click) terminates, + # but less is still alive, paging the output and messing up the terminal. + # + # If the user wants to make the pager exit on ^C, they should set + # `LESS='-K'`. It's not our decision to make. + while True: + try: + c.wait() + except KeyboardInterrupt: + pass + else: + break + + +def _tempfilepager( + generator: t.Iterable[str], cmd: str, color: t.Optional[bool] +) -> None: + """Page through text by invoking a program on a temporary file.""" + import tempfile + + fd, filename = tempfile.mkstemp() + # TODO: This never terminates if the passed generator never terminates. + text = "".join(generator) + if not color: + text = strip_ansi(text) + encoding = get_best_encoding(sys.stdout) + with open_stream(filename, "wb")[0] as f: + f.write(text.encode(encoding)) + try: + os.system(f'{cmd} "{filename}"') + finally: + os.close(fd) + os.unlink(filename) + + +def _nullpager( + stream: t.TextIO, generator: t.Iterable[str], color: t.Optional[bool] +) -> None: + """Simply print unformatted text. 
This is the ultimate fallback.""" + for text in generator: + if not color: + text = strip_ansi(text) + stream.write(text) + + +class Editor: + def __init__( + self, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + ) -> None: + self.editor = editor + self.env = env + self.require_save = require_save + self.extension = extension + + def get_editor(self) -> str: + if self.editor is not None: + return self.editor + for key in "VISUAL", "EDITOR": + rv = os.environ.get(key) + if rv: + return rv + if WIN: + return "notepad" + for editor in "sensible-editor", "vim", "nano": + if os.system(f"which {editor} >/dev/null 2>&1") == 0: + return editor + return "vi" + + def edit_file(self, filename: str) -> None: + import subprocess + + editor = self.get_editor() + environ: t.Optional[t.Dict[str, str]] = None + + if self.env: + environ = os.environ.copy() + environ.update(self.env) + + try: + c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True) + exit_code = c.wait() + if exit_code != 0: + raise ClickException( + _("{editor}: Editing failed").format(editor=editor) + ) + except OSError as e: + raise ClickException( + _("{editor}: Editing failed: {e}").format(editor=editor, e=e) + ) from e + + def edit(self, text: t.Optional[t.AnyStr]) -> t.Optional[t.AnyStr]: + import tempfile + + if not text: + data = b"" + elif isinstance(text, (bytes, bytearray)): + data = text + else: + if text and not text.endswith("\n"): + text += "\n" + + if WIN: + data = text.replace("\n", "\r\n").encode("utf-8-sig") + else: + data = text.encode("utf-8") + + fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) + f: t.BinaryIO + + try: + with os.fdopen(fd, "wb") as f: + f.write(data) + + # If the filesystem resolution is 1 second, like Mac OS + # 10.12 Extended, or 2 seconds, like FAT32, and the editor + # closes very fast, require_save can fail. Set the modified + # time to be 2 seconds in the past to work around this. + os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) + # Depending on the resolution, the exact value might not be + # recorded, so get the new recorded value. 
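`Editor` is normally reached through `click.edit`, which returns `None` when `require_save` is set and the mtime comparison above detects that nothing was saved. A sketch:

.. code-block:: python

    import click

    # Opens $VISUAL/$EDITOR, falling back to notepad/vi as chosen above.
    text = click.edit("# Replace this line with your message\n",
                      extension=".md", require_save=True)
    if text is None:
        click.echo("Aborted: no save detected.")
    else:
        click.echo(text)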
+ timestamp = os.path.getmtime(name) + + self.edit_file(name) + + if self.require_save and os.path.getmtime(name) == timestamp: + return None + + with open(name, "rb") as f: + rv = f.read() + + if isinstance(text, (bytes, bytearray)): + return rv + + return rv.decode("utf-8-sig").replace("\r\n", "\n") # type: ignore + finally: + os.unlink(name) + + +def open_url(url: str, wait: bool = False, locate: bool = False) -> int: + import subprocess + + def _unquote_file(url: str) -> str: + from urllib.parse import unquote + + if url.startswith("file://"): + url = unquote(url[7:]) + + return url + + if sys.platform == "darwin": + args = ["open"] + if wait: + args.append("-W") + if locate: + args.append("-R") + args.append(_unquote_file(url)) + null = open("/dev/null", "w") + try: + return subprocess.Popen(args, stderr=null).wait() + finally: + null.close() + elif WIN: + if locate: + url = _unquote_file(url.replace('"', "")) + args = f'explorer /select,"{url}"' + else: + url = url.replace('"', "") + wait_str = "/WAIT" if wait else "" + args = f'start {wait_str} "" "{url}"' + return os.system(args) + elif CYGWIN: + if locate: + url = os.path.dirname(_unquote_file(url).replace('"', "")) + args = f'cygstart "{url}"' + else: + url = url.replace('"', "") + wait_str = "-w" if wait else "" + args = f'cygstart {wait_str} "{url}"' + return os.system(args) + + try: + if locate: + url = os.path.dirname(_unquote_file(url)) or "." + else: + url = _unquote_file(url) + c = subprocess.Popen(["xdg-open", url]) + if wait: + return c.wait() + return 0 + except OSError: + if url.startswith(("http://", "https://")) and not locate and not wait: + import webbrowser + + webbrowser.open(url) + return 0 + return 1 + + +def _translate_ch_to_exc(ch: str) -> t.Optional[BaseException]: + if ch == "\x03": + raise KeyboardInterrupt() + + if ch == "\x04" and not WIN: # Unix-like, Ctrl+D + raise EOFError() + + if ch == "\x1a" and WIN: # Windows, Ctrl+Z + raise EOFError() + + return None + + +if WIN: + import msvcrt + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + yield -1 + + def getchar(echo: bool) -> str: + # The function `getch` will return a bytes object corresponding to + # the pressed character. Since Windows 10 build 1803, it will also + # return \x00 when called a second time after pressing a regular key. + # + # `getwch` does not share this probably-bugged behavior. Moreover, it + # returns a Unicode object by default, which is what we want. + # + # Either of these functions will return \x00 or \xe0 to indicate + # a special key, and you need to call the same function again to get + # the "rest" of the code. The fun part is that \u00e0 is + # "latin small letter a with grave", so if you type that on a French + # keyboard, you _also_ get a \xe0. + # E.g., consider the Up arrow. This returns \xe0 and then \x48. The + # resulting Unicode string reads as "a with grave" + "capital H". + # This is indistinguishable from when the user actually types + # "a with grave" and then "capital H". + # + # When \xe0 is returned, we assume it's part of a special-key sequence + # and call `getwch` again, but that means that when the user types + # the \u00e0 character, `getchar` doesn't return until a second + # character is typed. + # The alternative is returning immediately, but that would mess up + # cross-platform handling of arrow keys and others that start with + # \xe0. 
Another option is using `getch`, but then we can't reliably + # read non-ASCII characters, because return values of `getch` are + # limited to the current 8-bit codepage. + # + # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` + # is doing the right thing in more situations than with `getch`. + func: t.Callable[[], str] + + if echo: + func = msvcrt.getwche # type: ignore + else: + func = msvcrt.getwch # type: ignore + + rv = func() + + if rv in ("\x00", "\xe0"): + # \x00 and \xe0 are control characters that indicate special key, + # see above. + rv += func() + + _translate_ch_to_exc(rv) + return rv + +else: + import tty + import termios + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + f: t.Optional[t.TextIO] + fd: int + + if not isatty(sys.stdin): + f = open("/dev/tty") + fd = f.fileno() + else: + fd = sys.stdin.fileno() + f = None + + try: + old_settings = termios.tcgetattr(fd) + + try: + tty.setraw(fd) + yield fd + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + sys.stdout.flush() + + if f is not None: + f.close() + except termios.error: + pass + + def getchar(echo: bool) -> str: + with raw_terminal() as fd: + ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") + + if echo and isatty(sys.stdout): + sys.stdout.write(ch) + + _translate_ch_to_exc(ch) + return ch diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/_textwrap.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/_textwrap.py new file mode 100644 index 00000000..b47dcbd4 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/_textwrap.py @@ -0,0 +1,49 @@ +import textwrap +import typing as t +from contextlib import contextmanager + + +class TextWrapper(textwrap.TextWrapper): + def _handle_long_word( + self, + reversed_chunks: t.List[str], + cur_line: t.List[str], + cur_len: int, + width: int, + ) -> None: + space_left = max(width - cur_len, 1) + + if self.break_long_words: + last = reversed_chunks[-1] + cut = last[:space_left] + res = last[space_left:] + cur_line.append(cut) + reversed_chunks[-1] = res + elif not cur_line: + cur_line.append(reversed_chunks.pop()) + + @contextmanager + def extra_indent(self, indent: str) -> t.Iterator[None]: + old_initial_indent = self.initial_indent + old_subsequent_indent = self.subsequent_indent + self.initial_indent += indent + self.subsequent_indent += indent + + try: + yield + finally: + self.initial_indent = old_initial_indent + self.subsequent_indent = old_subsequent_indent + + def indent_only(self, text: str) -> str: + rv = [] + + for idx, line in enumerate(text.splitlines()): + indent = self.initial_indent + + if idx > 0: + indent = self.subsequent_indent + + rv.append(f"{indent}{line}") + + return "\n".join(rv) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/_winconsole.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/_winconsole.py new file mode 100644 index 00000000..6b20df31 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/_winconsole.py @@ -0,0 +1,279 @@ +# This module is based on the excellent work by Adam Bartoš who +# provided a lot of what went into the implementation here in +# the discussion to issue1602 in the Python bug tracker. +# +# There are some general differences in regards to how this works +# compared to the original patches as we do not need to patch +# the entire interpreter but just work in our little world of +# echo and prompt. 
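Both `getchar` implementations above (the `msvcrt.getwch` branch on Windows, the `tty.setraw` branch elsewhere) sit behind one public function; a sketch:

.. code-block:: python

    import click

    click.echo("Press a key to continue...")
    key = click.getchar()  # single keypress, no Enter required
    # Ctrl+C and Ctrl+D (Ctrl+Z on Windows) surface as exceptions via
    # _translate_ch_to_exc rather than as characters.
    click.echo(f"got {key!r}")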
+import io +import sys +import time +import typing as t +from ctypes import byref +from ctypes import c_char +from ctypes import c_char_p +from ctypes import c_int +from ctypes import c_ssize_t +from ctypes import c_ulong +from ctypes import c_void_p +from ctypes import POINTER +from ctypes import py_object +from ctypes import Structure +from ctypes.wintypes import DWORD +from ctypes.wintypes import HANDLE +from ctypes.wintypes import LPCWSTR +from ctypes.wintypes import LPWSTR + +from ._compat import _NonClosingTextIOWrapper + +assert sys.platform == "win32" +import msvcrt # noqa: E402 +from ctypes import windll # noqa: E402 +from ctypes import WINFUNCTYPE # noqa: E402 + +c_ssize_p = POINTER(c_ssize_t) + +kernel32 = windll.kernel32 +GetStdHandle = kernel32.GetStdHandle +ReadConsoleW = kernel32.ReadConsoleW +WriteConsoleW = kernel32.WriteConsoleW +GetConsoleMode = kernel32.GetConsoleMode +GetLastError = kernel32.GetLastError +GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) +CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ("CommandLineToArgvW", windll.shell32) +) +LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) + +STDIN_HANDLE = GetStdHandle(-10) +STDOUT_HANDLE = GetStdHandle(-11) +STDERR_HANDLE = GetStdHandle(-12) + +PyBUF_SIMPLE = 0 +PyBUF_WRITABLE = 1 + +ERROR_SUCCESS = 0 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_OPERATION_ABORTED = 995 + +STDIN_FILENO = 0 +STDOUT_FILENO = 1 +STDERR_FILENO = 2 + +EOF = b"\x1a" +MAX_BYTES_WRITTEN = 32767 + +try: + from ctypes import pythonapi +except ImportError: + # On PyPy we cannot get buffers so our ability to operate here is + # severely limited. + get_buffer = None +else: + + class Py_buffer(Structure): + _fields_ = [ + ("buf", c_void_p), + ("obj", py_object), + ("len", c_ssize_t), + ("itemsize", c_ssize_t), + ("readonly", c_int), + ("ndim", c_int), + ("format", c_char_p), + ("shape", c_ssize_p), + ("strides", c_ssize_p), + ("suboffsets", c_ssize_p), + ("internal", c_void_p), + ] + + PyObject_GetBuffer = pythonapi.PyObject_GetBuffer + PyBuffer_Release = pythonapi.PyBuffer_Release + + def get_buffer(obj, writable=False): + buf = Py_buffer() + flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE + PyObject_GetBuffer(py_object(obj), byref(buf), flags) + + try: + buffer_type = c_char * buf.len + return buffer_type.from_address(buf.buf) + finally: + PyBuffer_Release(byref(buf)) + + +class _WindowsConsoleRawIOBase(io.RawIOBase): + def __init__(self, handle): + self.handle = handle + + def isatty(self): + super().isatty() + return True + + +class _WindowsConsoleReader(_WindowsConsoleRawIOBase): + def readable(self): + return True + + def readinto(self, b): + bytes_to_be_read = len(b) + if not bytes_to_be_read: + return 0 + elif bytes_to_be_read % 2: + raise ValueError( + "cannot read odd number of bytes from UTF-16-LE encoded console" + ) + + buffer = get_buffer(b, writable=True) + code_units_to_be_read = bytes_to_be_read // 2 + code_units_read = c_ulong() + + rv = ReadConsoleW( + HANDLE(self.handle), + buffer, + code_units_to_be_read, + byref(code_units_read), + None, + ) + if GetLastError() == ERROR_OPERATION_ABORTED: + # wait for KeyboardInterrupt + time.sleep(0.1) + if not rv: + raise OSError(f"Windows error: {GetLastError()}") + + if buffer[0] == EOF: + return 0 + return 2 * code_units_read.value + + +class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): + def writable(self): + return True + + @staticmethod + def _get_error_message(errno): + if errno == ERROR_SUCCESS: + 
return "ERROR_SUCCESS" + elif errno == ERROR_NOT_ENOUGH_MEMORY: + return "ERROR_NOT_ENOUGH_MEMORY" + return f"Windows error {errno}" + + def write(self, b): + bytes_to_be_written = len(b) + buf = get_buffer(b) + code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 + code_units_written = c_ulong() + + WriteConsoleW( + HANDLE(self.handle), + buf, + code_units_to_be_written, + byref(code_units_written), + None, + ) + bytes_written = 2 * code_units_written.value + + if bytes_written == 0 and bytes_to_be_written > 0: + raise OSError(self._get_error_message(GetLastError())) + return bytes_written + + +class ConsoleStream: + def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: + self._text_stream = text_stream + self.buffer = byte_stream + + @property + def name(self) -> str: + return self.buffer.name + + def write(self, x: t.AnyStr) -> int: + if isinstance(x, str): + return self._text_stream.write(x) + try: + self.flush() + except Exception: + pass + return self.buffer.write(x) + + def writelines(self, lines: t.Iterable[t.AnyStr]) -> None: + for line in lines: + self.write(line) + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._text_stream, name) + + def isatty(self) -> bool: + return self.buffer.isatty() + + def __repr__(self): + return f"" + + +def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { + 0: _get_text_stdin, + 1: _get_text_stdout, + 2: _get_text_stderr, +} + + +def _is_console(f: t.TextIO) -> bool: + if not hasattr(f, "fileno"): + return False + + try: + fileno = f.fileno() + except (OSError, io.UnsupportedOperation): + return False + + handle = msvcrt.get_osfhandle(fileno) + return bool(GetConsoleMode(handle, byref(DWORD()))) + + +def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> t.Optional[t.TextIO]: + if ( + get_buffer is not None + and encoding in {"utf-16-le", None} + and errors in {"strict", None} + and _is_console(f) + ): + func = _stream_factories.get(f.fileno()) + if func is not None: + b = getattr(f, "buffer", None) + + if b is None: + return None + + return func(b) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/core.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/core.py new file mode 100644 index 00000000..5abfb0f3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/core.py @@ -0,0 +1,2998 @@ +import enum +import errno +import inspect +import os +import sys +import typing as t +from collections import abc +from contextlib import contextmanager +from contextlib import ExitStack +from functools import 
partial +from functools import update_wrapper +from gettext import gettext as _ +from gettext import ngettext +from itertools import repeat + +from . import types +from .exceptions import Abort +from .exceptions import BadParameter +from .exceptions import ClickException +from .exceptions import Exit +from .exceptions import MissingParameter +from .exceptions import UsageError +from .formatting import HelpFormatter +from .formatting import join_options +from .globals import pop_context +from .globals import push_context +from .parser import _flag_needs_value +from .parser import OptionParser +from .parser import split_opt +from .termui import confirm +from .termui import prompt +from .termui import style +from .utils import _detect_program_name +from .utils import _expand_args +from .utils import echo +from .utils import make_default_short_help +from .utils import make_str +from .utils import PacifyFlushWrapper + +if t.TYPE_CHECKING: + import typing_extensions as te + from .shell_completion import CompletionItem + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) +V = t.TypeVar("V") + + +def _complete_visible_commands( + ctx: "Context", incomplete: str +) -> t.Iterator[t.Tuple[str, "Command"]]: + """List all the subcommands of a group that start with the + incomplete value and aren't hidden. + + :param ctx: Invocation context for the group. + :param incomplete: Value being completed. May be empty. + """ + multi = t.cast(MultiCommand, ctx.command) + + for name in multi.list_commands(ctx): + if name.startswith(incomplete): + command = multi.get_command(ctx, name) + + if command is not None and not command.hidden: + yield name, command + + +def _check_multicommand( + base_command: "MultiCommand", cmd_name: str, cmd: "Command", register: bool = False +) -> None: + if not base_command.chain or not isinstance(cmd, MultiCommand): + return + if register: + hint = ( + "It is not possible to add multi commands as children to" + " another multi command that is in chain mode." + ) + else: + hint = ( + "Found a multi command as subcommand to a multi command" + " that is in chain mode. This is not supported." + ) + raise RuntimeError( + f"{hint}. Command {base_command.name!r} is set to chain and" + f" {cmd_name!r} was added as a subcommand but it in itself is a" + f" multi command. ({cmd_name!r} is a {type(cmd).__name__}" + f" within a chained {type(base_command).__name__} named" + f" {base_command.name!r})." + ) + + +def batch(iterable: t.Iterable[V], batch_size: int) -> t.List[t.Tuple[V, ...]]: + return list(zip(*repeat(iter(iterable), batch_size))) + + +@contextmanager +def augment_usage_errors( + ctx: "Context", param: t.Optional["Parameter"] = None +) -> t.Iterator[None]: + """Context manager that attaches extra information to exceptions.""" + try: + yield + except BadParameter as e: + if e.ctx is None: + e.ctx = ctx + if param is not None and e.param is None: + e.param = param + raise + except UsageError as e: + if e.ctx is None: + e.ctx = ctx + raise + + +def iter_params_for_processing( + invocation_order: t.Sequence["Parameter"], + declaration_order: t.Sequence["Parameter"], +) -> t.List["Parameter"]: + """Given a sequence of parameters in the order as should be considered + for processing and an iterable of parameters that exist, this returns + a list in the correct order as they should be processed. 
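The eager-first ordering that `sort_key` below implements is what makes the classic `--version` pattern work: the eager flag's callback runs and exits before required parameters are validated. A sketch of that pattern (`print_version` and the version string are illustrative):

.. code-block:: python

    import click

    def print_version(ctx, param, value):
        if not value or ctx.resilient_parsing:
            return
        click.echo("demo 1.0")  # hypothetical version string
        ctx.exit()

    @click.command()
    @click.option("--version", is_flag=True, is_eager=True,
                  expose_value=False, callback=print_version)
    @click.option("--name", required=True)
    def cli(name):
        click.echo(f"Hello {name}!")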
+ """ + + def sort_key(item: "Parameter") -> t.Tuple[bool, float]: + try: + idx: float = invocation_order.index(item) + except ValueError: + idx = float("inf") + + return not item.is_eager, idx + + return sorted(declaration_order, key=sort_key) + + +class ParameterSource(enum.Enum): + """This is an :class:`~enum.Enum` that indicates the source of a + parameter's value. + + Use :meth:`click.Context.get_parameter_source` to get the + source for a parameter by name. + + .. versionchanged:: 8.0 + Use :class:`~enum.Enum` and drop the ``validate`` method. + + .. versionchanged:: 8.0 + Added the ``PROMPT`` value. + """ + + COMMANDLINE = enum.auto() + """The value was provided by the command line args.""" + ENVIRONMENT = enum.auto() + """The value was provided with an environment variable.""" + DEFAULT = enum.auto() + """Used the default specified by the parameter.""" + DEFAULT_MAP = enum.auto() + """Used a default provided by :attr:`Context.default_map`.""" + PROMPT = enum.auto() + """Used a prompt to confirm a default or provide a value.""" + + +class Context: + """The context is a special internal object that holds state relevant + for the script execution at every single level. It's normally invisible + to commands unless they opt-in to getting access to it. + + The context is useful as it can pass internal objects around and can + control special execution features such as reading data from + environment variables. + + A context can be used as context manager in which case it will call + :meth:`close` on teardown. + + :param command: the command class for this context. + :param parent: the parent context. + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it is usually + the name of the script, for commands below it it's + the name of the script. + :param obj: an arbitrary object of user data. + :param auto_envvar_prefix: the prefix to use for automatic environment + variables. If this is `None` then reading + from environment variables is disabled. This + does not affect manually set environment + variables which are always read. + :param default_map: a dictionary (like object) with default values + for parameters. + :param terminal_width: the width of the terminal. The default is + inherit from parent context. If no context + defines the terminal width then auto + detection will be applied. + :param max_content_width: the maximum width for content rendered by + Click (this currently only affects help + pages). This defaults to 80 characters if + not overridden. In other words: even if the + terminal is larger than that, Click will not + format things wider than 80 characters by + default. In addition to that, formatters might + add some safety mapping on the right. + :param resilient_parsing: if this flag is enabled then Click will + parse without any interactivity or callback + invocation. Default values will also be + ignored. This is useful for implementing + things such as completion support. + :param allow_extra_args: if this is set to `True` then extra arguments + at the end will not raise an error and will be + kept on the context. The default is to inherit + from the command. + :param allow_interspersed_args: if this is set to `False` then options + and arguments cannot be mixed. The + default is to inherit from the command. + :param ignore_unknown_options: instructs click to ignore options it does + not know and keeps them for later + processing. 
+    :param help_option_names: optionally a list of strings that define how
+                              the default help parameter is named. The
+                              default is ``['--help']``.
+    :param token_normalize_func: an optional function that is used to
+                                 normalize tokens (options, choices,
+                                 etc.). This for instance can be used to
+                                 implement case-insensitive behavior.
+    :param color: controls if the terminal supports ANSI colors or not. The
+                  default is autodetection. This is only needed if ANSI
+                  codes are used in texts that Click prints which is by
+                  default not the case. This for instance would affect
+                  help output.
+    :param show_default: Show the default value for commands. If this
+        value is not set, it defaults to the value from the parent
+        context. ``Command.show_default`` overrides this default for the
+        specific command.
+
+    .. versionchanged:: 8.1
+        The ``show_default`` parameter is overridden by
+        ``Command.show_default``, instead of the other way around.
+
+    .. versionchanged:: 8.0
+        The ``show_default`` parameter defaults to the value from the
+        parent context.
+
+    .. versionchanged:: 7.1
+        Added the ``show_default`` parameter.
+
+    .. versionchanged:: 4.0
+        Added the ``color``, ``ignore_unknown_options``, and
+        ``max_content_width`` parameters.
+
+    .. versionchanged:: 3.0
+        Added the ``allow_extra_args`` and ``allow_interspersed_args``
+        parameters.
+
+    .. versionchanged:: 2.0
+        Added the ``resilient_parsing``, ``help_option_names``, and
+        ``token_normalize_func`` parameters.
+    """
+
+    #: The formatter class to create with :meth:`make_formatter`.
+    #:
+    #: .. versionadded:: 8.0
+    formatter_class: t.Type["HelpFormatter"] = HelpFormatter
+
+    def __init__(
+        self,
+        command: "Command",
+        parent: t.Optional["Context"] = None,
+        info_name: t.Optional[str] = None,
+        obj: t.Optional[t.Any] = None,
+        auto_envvar_prefix: t.Optional[str] = None,
+        default_map: t.Optional[t.Dict[str, t.Any]] = None,
+        terminal_width: t.Optional[int] = None,
+        max_content_width: t.Optional[int] = None,
+        resilient_parsing: bool = False,
+        allow_extra_args: t.Optional[bool] = None,
+        allow_interspersed_args: t.Optional[bool] = None,
+        ignore_unknown_options: t.Optional[bool] = None,
+        help_option_names: t.Optional[t.List[str]] = None,
+        token_normalize_func: t.Optional[t.Callable[[str], str]] = None,
+        color: t.Optional[bool] = None,
+        show_default: t.Optional[bool] = None,
+    ) -> None:
+        #: the parent context or `None` if none exists.
+        self.parent = parent
+        #: the :class:`Command` for this context.
+        self.command = command
+        #: the descriptive information name
+        self.info_name = info_name
+        #: Map of parameter names to their parsed values. Parameters
+        #: with ``expose_value=False`` are not stored.
+        self.params: t.Dict[str, t.Any] = {}
+        #: the leftover arguments.
+        self.args: t.List[str] = []
+        #: protected arguments. These are arguments that are prepended
+        #: to `args` when certain parsing scenarios are encountered but
+        #: must never be propagated to other arguments. This is used
+        #: to implement nested parsing.
+        self.protected_args: t.List[str] = []
+        #: the collected prefixes of the command's options.
+        self._opt_prefixes: t.Set[str] = set(parent._opt_prefixes) if parent else set()
+
+        if obj is None and parent is not None:
+            obj = parent.obj
+
+        #: the user object stored.
+        self.obj: t.Any = obj
+        self._meta: t.Dict[str, t.Any] = getattr(parent, "meta", {})
+
+        #: A dictionary (-like object) with defaults for parameters.
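+        # For example (names and values illustrative): invoking a group as
+        #
+        #     cli(default_map={"runserver": {"port": 5000}})
+        #
+        # stores the outer mapping on the root context, and the lookup
+        # below hands the nested {"port": 5000} mapping to the context of
+        # the "runserver" subcommand as its own default_map.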
+        if (
+            default_map is None
+            and info_name is not None
+            and parent is not None
+            and parent.default_map is not None
+        ):
+            default_map = parent.default_map.get(info_name)
+
+        self.default_map: t.Optional[t.Dict[str, t.Any]] = default_map
+
+        #: This flag indicates if a subcommand is going to be executed. A
+        #: group callback can use this information to figure out if it's
+        #: being executed directly or because the execution flow passes
+        #: onwards to a subcommand. By default it's None, but it can be
+        #: the name of the subcommand to execute.
+        #:
+        #: If chaining is enabled this will be set to ``'*'`` in case
+        #: any commands are executed. It is however not possible to
+        #: figure out which ones. If you require this knowledge you
+        #: should use a :func:`result_callback`.
+        self.invoked_subcommand: t.Optional[str] = None
+
+        if terminal_width is None and parent is not None:
+            terminal_width = parent.terminal_width
+
+        #: The width of the terminal (None is autodetection).
+        self.terminal_width: t.Optional[int] = terminal_width
+
+        if max_content_width is None and parent is not None:
+            max_content_width = parent.max_content_width
+
+        #: The maximum width of formatted content (None implies a sensible
+        #: default which is 80 for most things).
+        self.max_content_width: t.Optional[int] = max_content_width
+
+        if allow_extra_args is None:
+            allow_extra_args = command.allow_extra_args
+
+        #: Indicates if the context allows extra args or if it should
+        #: fail on parsing.
+        #:
+        #: .. versionadded:: 3.0
+        self.allow_extra_args = allow_extra_args
+
+        if allow_interspersed_args is None:
+            allow_interspersed_args = command.allow_interspersed_args
+
+        #: Indicates if the context allows mixing of arguments and
+        #: options or not.
+        #:
+        #: .. versionadded:: 3.0
+        self.allow_interspersed_args: bool = allow_interspersed_args
+
+        if ignore_unknown_options is None:
+            ignore_unknown_options = command.ignore_unknown_options
+
+        #: Instructs click to ignore options that a command does not
+        #: understand and stores them on the context for later
+        #: processing. This is primarily useful for situations where you
+        #: want to call into external programs. Generally this pattern is
+        #: strongly discouraged because it's not possible to losslessly
+        #: forward all arguments.
+        #:
+        #: .. versionadded:: 4.0
+        self.ignore_unknown_options: bool = ignore_unknown_options
+
+        if help_option_names is None:
+            if parent is not None:
+                help_option_names = parent.help_option_names
+            else:
+                help_option_names = ["--help"]
+
+        #: The names for the help options.
+        self.help_option_names: t.List[str] = help_option_names
+
+        if token_normalize_func is None and parent is not None:
+            token_normalize_func = parent.token_normalize_func
+
+        #: An optional normalization function for tokens. This applies to
+        #: options, choices, commands, etc.
+        self.token_normalize_func: t.Optional[
+            t.Callable[[str], str]
+        ] = token_normalize_func
+
+        #: Indicates if resilient parsing is enabled. In that case Click
+        #: will do its best to not cause any failures and default values
+        #: will be ignored. Useful for completion.
+        self.resilient_parsing: bool = resilient_parsing
+
+        # If there is no envvar prefix yet, but the parent has one and
+        # the command on this level has a name, we can expand the envvar
+        # prefix automatically.
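+        # For example (prefix and names illustrative): a root context
+        # created with auto_envvar_prefix="GREETER" gives its "hello"
+        # subcommand the expanded prefix "GREETER_HELLO", so an option
+        # "--name" on that subcommand reads the GREETER_HELLO_NAME
+        # environment variable.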
+ if auto_envvar_prefix is None: + if ( + parent is not None + and parent.auto_envvar_prefix is not None + and self.info_name is not None + ): + auto_envvar_prefix = ( + f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" + ) + else: + auto_envvar_prefix = auto_envvar_prefix.upper() + + if auto_envvar_prefix is not None: + auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") + + self.auto_envvar_prefix: t.Optional[str] = auto_envvar_prefix + + if color is None and parent is not None: + color = parent.color + + #: Controls if styling output is wanted or not. + self.color: t.Optional[bool] = color + + if show_default is None and parent is not None: + show_default = parent.show_default + + #: Show option default values when formatting help text. + self.show_default: t.Optional[bool] = show_default + + self._close_callbacks: t.List[t.Callable[[], t.Any]] = [] + self._depth = 0 + self._parameter_source: t.Dict[str, ParameterSource] = {} + self._exit_stack = ExitStack() + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire CLI + structure. + + .. code-block:: python + + with Context(cli) as ctx: + info = ctx.to_info_dict() + + .. versionadded:: 8.0 + """ + return { + "command": self.command.to_info_dict(self), + "info_name": self.info_name, + "allow_extra_args": self.allow_extra_args, + "allow_interspersed_args": self.allow_interspersed_args, + "ignore_unknown_options": self.ignore_unknown_options, + "auto_envvar_prefix": self.auto_envvar_prefix, + } + + def __enter__(self) -> "Context": + self._depth += 1 + push_context(self) + return self + + def __exit__(self, exc_type, exc_value, tb): # type: ignore + self._depth -= 1 + if self._depth == 0: + self.close() + pop_context() + + @contextmanager + def scope(self, cleanup: bool = True) -> t.Iterator["Context"]: + """This helper method can be used with the context object to promote + it to the current thread local (see :func:`get_current_context`). + The default behavior of this is to invoke the cleanup functions which + can be disabled by setting `cleanup` to `False`. The cleanup + functions are typically used for things such as closing file handles. + + If the cleanup is intended the context object can also be directly + used as a context manager. + + Example usage:: + + with ctx.scope(): + assert get_current_context() is ctx + + This is equivalent:: + + with ctx: + assert get_current_context() is ctx + + .. versionadded:: 5.0 + + :param cleanup: controls if the cleanup functions should be run or + not. The default is to run these functions. In + some situations the context only wants to be + temporarily pushed in which case this can be disabled. + Nested pushes automatically defer the cleanup. + """ + if not cleanup: + self._depth += 1 + try: + with self as rv: + yield rv + finally: + if not cleanup: + self._depth -= 1 + + @property + def meta(self) -> t.Dict[str, t.Any]: + """This is a dictionary which is shared with all the contexts + that are nested. It exists so that click utilities can store some + state here if they need to. It is however the responsibility of + that code to manage this dictionary well. + + The keys are supposed to be unique dotted strings. For instance + module paths are a good choice for it. What is stored in there is + irrelevant for the operation of click. However what is important is + that code that places data here adheres to the general semantics of + the system. 
+ + Example usage:: + + LANG_KEY = f'{__name__}.lang' + + def set_language(value): + ctx = get_current_context() + ctx.meta[LANG_KEY] = value + + def get_language(): + return get_current_context().meta.get(LANG_KEY, 'en_US') + + .. versionadded:: 5.0 + """ + return self._meta + + def make_formatter(self) -> HelpFormatter: + """Creates the :class:`~click.HelpFormatter` for the help and + usage output. + + To quickly customize the formatter class used without overriding + this method, set the :attr:`formatter_class` attribute. + + .. versionchanged:: 8.0 + Added the :attr:`formatter_class` attribute. + """ + return self.formatter_class( + width=self.terminal_width, max_width=self.max_content_width + ) + + def with_resource(self, context_manager: t.ContextManager[V]) -> V: + """Register a resource as if it were used in a ``with`` + statement. The resource will be cleaned up when the context is + popped. + + Uses :meth:`contextlib.ExitStack.enter_context`. It calls the + resource's ``__enter__()`` method and returns the result. When + the context is popped, it closes the stack, which calls the + resource's ``__exit__()`` method. + + To register a cleanup function for something that isn't a + context manager, use :meth:`call_on_close`. Or use something + from :mod:`contextlib` to turn it into a context manager first. + + .. code-block:: python + + @click.group() + @click.option("--name") + @click.pass_context + def cli(ctx): + ctx.obj = ctx.with_resource(connect_db(name)) + + :param context_manager: The context manager to enter. + :return: Whatever ``context_manager.__enter__()`` returns. + + .. versionadded:: 8.0 + """ + return self._exit_stack.enter_context(context_manager) + + def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: + """Register a function to be called when the context tears down. + + This can be used to close resources opened during the script + execution. Resources that support Python's context manager + protocol which would be used in a ``with`` statement should be + registered with :meth:`with_resource` instead. + + :param f: The function to execute on teardown. + """ + return self._exit_stack.callback(f) + + def close(self) -> None: + """Invoke all close callbacks registered with + :meth:`call_on_close`, and exit all context managers entered + with :meth:`with_resource`. + """ + self._exit_stack.close() + # In case the context is reused, create a new exit stack. + self._exit_stack = ExitStack() + + @property + def command_path(self) -> str: + """The computed command path. This is used for the ``usage`` + information on the help page. It's automatically created by + combining the info names of the chain of contexts to the root. 
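+
+        For instance (illustrative names): in a script invoked as
+        ``tool`` that defines a ``db`` group with a ``migrate``
+        subcommand, the ``migrate`` context's ``command_path`` is
+        ``tool db migrate``.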
+ """ + rv = "" + if self.info_name is not None: + rv = self.info_name + if self.parent is not None: + parent_command_path = [self.parent.command_path] + + if isinstance(self.parent.command, Command): + for param in self.parent.command.get_params(self): + parent_command_path.extend(param.get_usage_pieces(self)) + + rv = f"{' '.join(parent_command_path)} {rv}" + return rv.lstrip() + + def find_root(self) -> "Context": + """Finds the outermost context.""" + node = self + while node.parent is not None: + node = node.parent + return node + + def find_object(self, object_type: t.Type[V]) -> t.Optional[V]: + """Finds the closest object of a given type.""" + node: t.Optional["Context"] = self + + while node is not None: + if isinstance(node.obj, object_type): + return node.obj + + node = node.parent + + return None + + def ensure_object(self, object_type: t.Type[V]) -> V: + """Like :meth:`find_object` but sets the innermost object to a + new instance of `object_type` if it does not exist. + """ + rv = self.find_object(object_type) + if rv is None: + self.obj = rv = object_type() + return rv + + @t.overload + def lookup_default( + self, name: str, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def lookup_default( + self, name: str, call: "te.Literal[False]" = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def lookup_default(self, name: str, call: bool = True) -> t.Optional[t.Any]: + """Get the default for a parameter from :attr:`default_map`. + + :param name: Name of the parameter. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. + """ + if self.default_map is not None: + value = self.default_map.get(name) + + if call and callable(value): + return value() + + return value + + return None + + def fail(self, message: str) -> "te.NoReturn": + """Aborts the execution of the program with a specific error + message. + + :param message: the error message to fail with. + """ + raise UsageError(message, self) + + def abort(self) -> "te.NoReturn": + """Aborts the script.""" + raise Abort() + + def exit(self, code: int = 0) -> "te.NoReturn": + """Exits the application with a given exit code.""" + raise Exit(code) + + def get_usage(self) -> str: + """Helper method to get formatted usage string for the current + context and command. + """ + return self.command.get_usage(self) + + def get_help(self) -> str: + """Helper method to get formatted help page for the current + context and command. + """ + return self.command.get_help(self) + + def _make_sub_context(self, command: "Command") -> "Context": + """Create a new context of the same type as this context, but + for a new command. + + :meta private: + """ + return type(self)(command, info_name=command.name, parent=self) + + def invoke( + __self, # noqa: B902 + __callback: t.Union["Command", t.Callable[..., t.Any]], + *args: t.Any, + **kwargs: t.Any, + ) -> t.Any: + """Invokes a command callback in exactly the way it expects. There + are two ways to invoke this method: + + 1. the first argument can be a callback and all other arguments and + keyword arguments are forwarded directly to the function. + 2. the first argument is a click command object. In that case all + arguments are forwarded as well but proper click parameters + (options and click arguments) must be keyword arguments and Click + will fill in defaults. 
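+
+        An illustrative sketch of the second form (command and option
+        names are hypothetical)::
+
+            @click.command()
+            @click.option("--count", default=1)
+            def repeat(count):
+                ...
+
+            @click.command()
+            @click.pass_context
+            def caller(ctx):
+                ctx.invoke(repeat, count=3)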
+ + Note that before Click 3.2 keyword arguments were not properly filled + in against the intention of this code and no context was created. For + more information about this change and why it was done in a bugfix + release see :ref:`upgrade-to-3.2`. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if :meth:`forward` is called at multiple levels. + """ + if isinstance(__callback, Command): + other_cmd = __callback + + if other_cmd.callback is None: + raise TypeError( + "The given command does not have a callback that can be invoked." + ) + else: + __callback = other_cmd.callback + + ctx = __self._make_sub_context(other_cmd) + + for param in other_cmd.params: + if param.name not in kwargs and param.expose_value: + kwargs[param.name] = param.type_cast_value( # type: ignore + ctx, param.get_default(ctx) + ) + + # Track all kwargs as params, so that forward() will pass + # them on in subsequent calls. + ctx.params.update(kwargs) + else: + ctx = __self + + with augment_usage_errors(__self): + with ctx: + return __callback(*args, **kwargs) + + def forward( + __self, __cmd: "Command", *args: t.Any, **kwargs: t.Any # noqa: B902 + ) -> t.Any: + """Similar to :meth:`invoke` but fills in default keyword + arguments from the current context if the other command expects + it. This cannot invoke callbacks directly, only other commands. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if ``forward`` is called at multiple levels. + """ + # Can only forward to other commands, not direct callbacks. + if not isinstance(__cmd, Command): + raise TypeError("Callback is not a command.") + + for param in __self.params: + if param not in kwargs: + kwargs[param] = __self.params[param] + + return __self.invoke(__cmd, *args, **kwargs) + + def set_parameter_source(self, name: str, source: ParameterSource) -> None: + """Set the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + :param name: The name of the parameter. + :param source: A member of :class:`~click.core.ParameterSource`. + """ + self._parameter_source[name] = source + + def get_parameter_source(self, name: str) -> t.Optional[ParameterSource]: + """Get the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + This can be useful for determining when a user specified a value + on the command line that is the same as the default value. It + will be :attr:`~click.core.ParameterSource.DEFAULT` only if the + value was actually taken from the default. + + :param name: The name of the parameter. + :rtype: ParameterSource + + .. versionchanged:: 8.0 + Returns ``None`` if the parameter was not provided from any + source. + """ + return self._parameter_source.get(name) + + +class BaseCommand: + """The base command implements the minimal API contract of commands. + Most code will never use this as it does not implement a lot of useful + functionality but it can act as the direct subclass of alternative + parsing methods that do not depend on the Click parser. + + For instance, this can be used to bridge Click and other systems like + argparse or docopt. + + Because base commands do not implement a lot of the API that other + parts of Click take for granted, they are not supported for all + operations. For instance, they cannot be used with the decorators + usually and they have no built-in callback system. + + .. 
versionchanged:: 2.0 + Added the `context_settings` parameter. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + """ + + #: The context class to create with :meth:`make_context`. + #: + #: .. versionadded:: 8.0 + context_class: t.Type[Context] = Context + #: the default for the :attr:`Context.allow_extra_args` flag. + allow_extra_args = False + #: the default for the :attr:`Context.allow_interspersed_args` flag. + allow_interspersed_args = True + #: the default for the :attr:`Context.ignore_unknown_options` flag. + ignore_unknown_options = False + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.Dict[str, t.Any]] = None, + ) -> None: + #: the name the command thinks it has. Upon registering a command + #: on a :class:`Group` the group will default the command name + #: with this information. You should instead use the + #: :class:`Context`\'s :attr:`~Context.info_name` attribute. + self.name = name + + if context_settings is None: + context_settings = {} + + #: an optional dictionary with defaults passed to the context. + self.context_settings: t.Dict[str, t.Any] = context_settings + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire structure + below this command. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + :param ctx: A :class:`Context` representing this command. + + .. versionadded:: 8.0 + """ + return {"name": self.name} + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def get_usage(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get usage") + + def get_help(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get help") + + def make_context( + self, + info_name: t.Optional[str], + args: t.List[str], + parent: t.Optional[Context] = None, + **extra: t.Any, + ) -> Context: + """This function when given an info name and arguments will kick + off the parsing and create a new :class:`Context`. It does not + invoke the actual command callback though. + + To quickly customize the context class used without overriding + this method, set the :attr:`context_class` attribute. + + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it's usually + the name of the script, for commands below it it's + the name of the command. + :param args: the arguments to parse as list of strings. + :param parent: the parent context if available. + :param extra: extra keyword arguments forwarded to the context + constructor. + + .. versionchanged:: 8.0 + Added the :attr:`context_class` attribute. + """ + for key, value in self.context_settings.items(): + if key not in extra: + extra[key] = value + + ctx = self.context_class( + self, info_name=info_name, parent=parent, **extra # type: ignore + ) + + with ctx.scope(cleanup=False): + self.parse_args(ctx, args) + return ctx + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + """Given a context and a list of arguments this creates the parser + and parses the arguments, then modifies the context as necessary. + This is automatically invoked by :meth:`make_context`. 
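+
+        A minimal sketch of a subclass bridging another parser, where
+        ``self._parser`` is a hypothetical :mod:`argparse` parser::
+
+            class ArgparseCommand(BaseCommand):
+                def parse_args(self, ctx, args):
+                    ctx.params = vars(self._parser.parse_args(args))
+                    return []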
+ """ + raise NotImplementedError("Base commands do not know how to parse arguments.") + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the command. The default + implementation is raising a not implemented error. + """ + raise NotImplementedError("Base commands are not invokable by default") + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of chained multi-commands. + + Any command could be part of a chained multi-command, so sibling + commands are valid at any point during command completion. Other + command classes will return more completions. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List["CompletionItem"] = [] + + while ctx.parent is not None: + ctx = ctx.parent + + if isinstance(ctx.command, MultiCommand) and ctx.command.chain: + results.extend( + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + if name not in ctx.protected_args + ) + + return results + + @t.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: "te.Literal[True]" = True, + **extra: t.Any, + ) -> "te.NoReturn": + ... + + @t.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = ..., + **extra: t.Any, + ) -> t.Any: + ... + + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = True, + windows_expand_args: bool = True, + **extra: t.Any, + ) -> t.Any: + """This is the way to invoke a script with all the bells and + whistles as a command line application. This will always terminate + the application after a call. If this is not wanted, ``SystemExit`` + needs to be caught. + + This method is also available by directly calling the instance of + a :class:`Command`. + + :param args: the arguments that should be used for parsing. If not + provided, ``sys.argv[1:]`` is used. + :param prog_name: the program name that should be used. By default + the program name is constructed by taking the file + name from ``sys.argv[0]``. + :param complete_var: the environment variable that controls the + bash completion support. The default is + ``"__COMPLETE"`` with prog_name in + uppercase. + :param standalone_mode: the default behavior is to invoke the script + in standalone mode. Click will then + handle exceptions and convert them into + error messages and the function will never + return but shut down the interpreter. If + this is set to `False` they will be + propagated to the caller and the return + value of this function is the return value + of :meth:`invoke`. + :param windows_expand_args: Expand glob patterns, user dir, and + env vars in command line args on Windows. + :param extra: extra keyword arguments are forwarded to the context + constructor. See :class:`Context` for more information. + + .. versionchanged:: 8.0.1 + Added the ``windows_expand_args`` parameter to allow + disabling command line arg expansion on Windows. + + .. 
versionchanged:: 8.0 + When taking arguments from ``sys.argv`` on Windows, glob + patterns, user dir, and env vars are expanded. + + .. versionchanged:: 3.0 + Added the ``standalone_mode`` parameter. + """ + if args is None: + args = sys.argv[1:] + + if os.name == "nt" and windows_expand_args: + args = _expand_args(args) + else: + args = list(args) + + if prog_name is None: + prog_name = _detect_program_name() + + # Process shell completion requests and exit early. + self._main_shell_completion(extra, prog_name, complete_var) + + try: + try: + with self.make_context(prog_name, args, **extra) as ctx: + rv = self.invoke(ctx) + if not standalone_mode: + return rv + # it's not safe to `ctx.exit(rv)` here! + # note that `rv` may actually contain data like "1" which + # has obvious effects + # more subtle case: `rv=[None, None]` can come out of + # chained commands which all returned `None` -- so it's not + # even always obvious that `rv` indicates success/failure + # by its truthiness/falsiness + ctx.exit() + except (EOFError, KeyboardInterrupt): + echo(file=sys.stderr) + raise Abort() from None + except ClickException as e: + if not standalone_mode: + raise + e.show() + sys.exit(e.exit_code) + except OSError as e: + if e.errno == errno.EPIPE: + sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) + sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) + sys.exit(1) + else: + raise + except Exit as e: + if standalone_mode: + sys.exit(e.exit_code) + else: + # in non-standalone mode, return the exit code + # note that this is only reached if `self.invoke` above raises + # an Exit explicitly -- thus bypassing the check there which + # would return its result + # the results of non-standalone execution may therefore be + # somewhat ambiguous: if there are codepaths which lead to + # `ctx.exit(1)` and to `return 1`, the caller won't be able to + # tell the difference between the two + return e.exit_code + except Abort: + if not standalone_mode: + raise + echo(_("Aborted!"), file=sys.stderr) + sys.exit(1) + + def _main_shell_completion( + self, + ctx_args: t.Dict[str, t.Any], + prog_name: str, + complete_var: t.Optional[str] = None, + ) -> None: + """Check if the shell is asking for tab completion, process + that, then exit early. Called from :meth:`main` before the + program is invoked. + + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. Defaults to + ``_{PROG_NAME}_COMPLETE``. + """ + if complete_var is None: + complete_var = f"_{prog_name}_COMPLETE".replace("-", "_").upper() + + instruction = os.environ.get(complete_var) + + if not instruction: + return + + from .shell_completion import shell_complete + + rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) + sys.exit(rv) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Alias for :meth:`main`.""" + return self.main(*args, **kwargs) + + +class Command(BaseCommand): + """Commands are the basic building block of command line interfaces in + Click. A basic command handles command line parsing and might dispatch + more parsing to commands nested below it. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + :param callback: the callback to invoke. This is optional. + :param params: the parameters to register with this command. 
This can + be either :class:`Option` or :class:`Argument` objects. + :param help: the help string to use for this command. + :param epilog: like the help string but it's printed at the end of the + help page after everything else. + :param short_help: the short help to use for this command. This is + shown on the command listing of the parent command. + :param add_help_option: by default each command registers a ``--help`` + option. This can be disabled by this parameter. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is disabled by default. + If enabled this will add ``--help`` as argument + if no arguments are passed + :param hidden: hide this command from help outputs. + + :param deprecated: issues a message indicating that + the command is deprecated. + + .. versionchanged:: 8.1 + ``help``, ``epilog``, and ``short_help`` are stored unprocessed, + all formatting is done when outputting help text, not at init, + and is done even if not using the ``@command`` decorator. + + .. versionchanged:: 8.0 + Added a ``repr`` showing the command name. + + .. versionchanged:: 7.1 + Added the ``no_args_is_help`` parameter. + + .. versionchanged:: 2.0 + Added the ``context_settings`` parameter. + """ + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.Dict[str, t.Any]] = None, + callback: t.Optional[t.Callable[..., t.Any]] = None, + params: t.Optional[t.List["Parameter"]] = None, + help: t.Optional[str] = None, + epilog: t.Optional[str] = None, + short_help: t.Optional[str] = None, + options_metavar: t.Optional[str] = "[OPTIONS]", + add_help_option: bool = True, + no_args_is_help: bool = False, + hidden: bool = False, + deprecated: bool = False, + ) -> None: + super().__init__(name, context_settings) + #: the callback to execute when the command fires. This might be + #: `None` in which case nothing happens. + self.callback = callback + #: the list of parameters for this command in the order they + #: should show up in the help page and execute. Eager parameters + #: will automatically be handled before non eager ones. + self.params: t.List["Parameter"] = params or [] + self.help = help + self.epilog = epilog + self.options_metavar = options_metavar + self.short_help = short_help + self.add_help_option = add_help_option + self.no_args_is_help = no_args_is_help + self.hidden = hidden + self.deprecated = deprecated + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + info_dict.update( + params=[param.to_info_dict() for param in self.get_params(ctx)], + help=self.help, + epilog=self.epilog, + short_help=self.short_help, + hidden=self.hidden, + deprecated=self.deprecated, + ) + return info_dict + + def get_usage(self, ctx: Context) -> str: + """Formats the usage line into a string and returns it. + + Calls :meth:`format_usage` internally. + """ + formatter = ctx.make_formatter() + self.format_usage(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_params(self, ctx: Context) -> t.List["Parameter"]: + rv = self.params + help_option = self.get_help_option(ctx) + + if help_option is not None: + rv = [*rv, help_option] + + return rv + + def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the usage line into the formatter. + + This is a low-level method called by :meth:`get_usage`. 
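+
+        Overriding it is one way to customize the usage line; an
+        illustrative sketch::
+
+            class MyCommand(Command):
+                def format_usage(self, ctx, formatter):
+                    formatter.write_usage(ctx.command_path, "SRC DST")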
+ """ + pieces = self.collect_usage_pieces(ctx) + formatter.write_usage(ctx.command_path, " ".join(pieces)) + + def collect_usage_pieces(self, ctx: Context) -> t.List[str]: + """Returns all the pieces that go into the usage line and returns + it as a list of strings. + """ + rv = [self.options_metavar] if self.options_metavar else [] + + for param in self.get_params(ctx): + rv.extend(param.get_usage_pieces(ctx)) + + return rv + + def get_help_option_names(self, ctx: Context) -> t.List[str]: + """Returns the names for the help option.""" + all_names = set(ctx.help_option_names) + for param in self.params: + all_names.difference_update(param.opts) + all_names.difference_update(param.secondary_opts) + return list(all_names) + + def get_help_option(self, ctx: Context) -> t.Optional["Option"]: + """Returns the help option object.""" + help_options = self.get_help_option_names(ctx) + + if not help_options or not self.add_help_option: + return None + + def show_help(ctx: Context, param: "Parameter", value: str) -> None: + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + return Option( + help_options, + is_flag=True, + is_eager=True, + expose_value=False, + callback=show_help, + help=_("Show this message and exit."), + ) + + def make_parser(self, ctx: Context) -> OptionParser: + """Creates the underlying option parser for this command.""" + parser = OptionParser(ctx) + for param in self.get_params(ctx): + param.add_to_parser(parser, ctx) + return parser + + def get_help(self, ctx: Context) -> str: + """Formats the help into a string and returns it. + + Calls :meth:`format_help` internally. + """ + formatter = ctx.make_formatter() + self.format_help(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_short_help_str(self, limit: int = 45) -> str: + """Gets short help for the command or makes it by shortening the + long help string. + """ + if self.short_help: + text = inspect.cleandoc(self.short_help) + elif self.help: + text = make_default_short_help(self.help, limit) + else: + text = "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + return text.strip() + + def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help into the formatter if it exists. + + This is a low-level method called by :meth:`get_help`. 
+ + This calls the following methods: + + - :meth:`format_usage` + - :meth:`format_help_text` + - :meth:`format_options` + - :meth:`format_epilog` + """ + self.format_usage(ctx, formatter) + self.format_help_text(ctx, formatter) + self.format_options(ctx, formatter) + self.format_epilog(ctx, formatter) + + def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help text to the formatter if it exists.""" + text = self.help if self.help is not None else "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + if text: + text = inspect.cleandoc(text).partition("\f")[0] + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(text) + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes all the options into the formatter if they exist.""" + opts = [] + for param in self.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + opts.append(rv) + + if opts: + with formatter.section(_("Options")): + formatter.write_dl(opts) + + def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the epilog into the formatter if it exists.""" + if self.epilog: + epilog = inspect.cleandoc(self.epilog) + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(epilog) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + parser = self.make_parser(ctx) + opts, args, param_order = parser.parse_args(args=args) + + for param in iter_params_for_processing(param_order, self.get_params(ctx)): + value, args = param.handle_parse_result(ctx, opts, args) + + if args and not ctx.allow_extra_args and not ctx.resilient_parsing: + ctx.fail( + ngettext( + "Got unexpected extra argument ({args})", + "Got unexpected extra arguments ({args})", + len(args), + ).format(args=" ".join(map(str, args))) + ) + + ctx.args = args + ctx._opt_prefixes.update(parser._opt_prefixes) + return args + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the attached callback (if it exists) + in the right way. + """ + if self.deprecated: + message = _( + "DeprecationWarning: The command {name!r} is deprecated." + ).format(name=self.name) + echo(style(message, fg="red"), err=True) + + if self.callback is not None: + return ctx.invoke(self.callback, **ctx.params) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options and chained multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List["CompletionItem"] = [] + + if incomplete and not incomplete[0].isalnum(): + for param in self.get_params(ctx): + if ( + not isinstance(param, Option) + or param.hidden + or ( + not param.multiple + and ctx.get_parameter_source(param.name) # type: ignore + is ParameterSource.COMMANDLINE + ) + ): + continue + + results.extend( + CompletionItem(name, help=param.help) + for name in [*param.opts, *param.secondary_opts] + if name.startswith(incomplete) + ) + + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class MultiCommand(Command): + """A multi command is the basic implementation of a command that + dispatches to subcommands. The most common version is the + :class:`Group`. + + :param invoke_without_command: this controls how the multi command itself + is invoked. By default it's only invoked + if a subcommand is provided. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is enabled by default if + `invoke_without_command` is disabled or disabled + if it's enabled. If enabled this will add + ``--help`` as argument if no arguments are + passed. + :param subcommand_metavar: the string that is used in the documentation + to indicate the subcommand place. + :param chain: if this is set to `True` chaining of multiple subcommands + is enabled. This restricts the form of commands in that + they cannot have optional arguments but it allows + multiple commands to be chained together. + :param result_callback: The result callback to attach to this multi + command. This can be set or changed later with the + :meth:`result_callback` decorator. + """ + + allow_extra_args = True + allow_interspersed_args = False + + def __init__( + self, + name: t.Optional[str] = None, + invoke_without_command: bool = False, + no_args_is_help: t.Optional[bool] = None, + subcommand_metavar: t.Optional[str] = None, + chain: bool = False, + result_callback: t.Optional[t.Callable[..., t.Any]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if no_args_is_help is None: + no_args_is_help = not invoke_without_command + + self.no_args_is_help = no_args_is_help + self.invoke_without_command = invoke_without_command + + if subcommand_metavar is None: + if chain: + subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." + else: + subcommand_metavar = "COMMAND [ARGS]..." + + self.subcommand_metavar = subcommand_metavar + self.chain = chain + # The result callback that is stored. This can be set or + # overridden with the :func:`result_callback` decorator. + self._result_callback = result_callback + + if self.chain: + for param in self.params: + if isinstance(param, Argument) and not param.required: + raise RuntimeError( + "Multi commands in chain mode cannot have" + " optional arguments." 
+ ) + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + commands = {} + + for name in self.list_commands(ctx): + command = self.get_command(ctx, name) + + if command is None: + continue + + sub_ctx = ctx._make_sub_context(command) + + with sub_ctx.scope(cleanup=False): + commands[name] = command.to_info_dict(sub_ctx) + + info_dict.update(commands=commands, chain=self.chain) + return info_dict + + def collect_usage_pieces(self, ctx: Context) -> t.List[str]: + rv = super().collect_usage_pieces(ctx) + rv.append(self.subcommand_metavar) + return rv + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + super().format_options(ctx, formatter) + self.format_commands(ctx, formatter) + + def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: + """Adds a result callback to the command. By default if a + result callback is already registered this will chain them but + this can be disabled with the `replace` parameter. The result + callback is invoked with the return value of the subcommand + (or the list of return values from all subcommands if chaining + is enabled) as well as the parameters as they would be passed + to the main callback. + + Example:: + + @click.group() + @click.option('-i', '--input', default=23) + def cli(input): + return 42 + + @cli.result_callback() + def process_result(result, input): + return result + input + + :param replace: if set to `True` an already existing result + callback will be removed. + + .. versionchanged:: 8.0 + Renamed from ``resultcallback``. + + .. versionadded:: 3.0 + """ + + def decorator(f: F) -> F: + old_callback = self._result_callback + + if old_callback is None or replace: + self._result_callback = f + return f + + def function(__value, *args, **kwargs): # type: ignore + inner = old_callback(__value, *args, **kwargs) # type: ignore + return f(inner, *args, **kwargs) + + self._result_callback = rv = update_wrapper(t.cast(F, function), f) + return rv + + return decorator + + def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: + """Extra format methods for multi methods that adds all the commands + after the options. + """ + commands = [] + for subcommand in self.list_commands(ctx): + cmd = self.get_command(ctx, subcommand) + # What is this, the tool lied about a command. 
Ignore it + if cmd is None: + continue + if cmd.hidden: + continue + + commands.append((subcommand, cmd)) + + # allow for 3 times the default spacing + if len(commands): + limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) + + rows = [] + for subcommand, cmd in commands: + help = cmd.get_short_help_str(limit) + rows.append((subcommand, help)) + + if rows: + with formatter.section(_("Commands")): + formatter.write_dl(rows) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + rest = super().parse_args(ctx, args) + + if self.chain: + ctx.protected_args = rest + ctx.args = [] + elif rest: + ctx.protected_args, ctx.args = rest[:1], rest[1:] + + return ctx.args + + def invoke(self, ctx: Context) -> t.Any: + def _process_result(value: t.Any) -> t.Any: + if self._result_callback is not None: + value = ctx.invoke(self._result_callback, value, **ctx.params) + return value + + if not ctx.protected_args: + if self.invoke_without_command: + # No subcommand was invoked, so the result callback is + # invoked with the group return value for regular + # groups, or an empty list for chained groups. + with ctx: + rv = super().invoke(ctx) + return _process_result([] if self.chain else rv) + ctx.fail(_("Missing command.")) + + # Fetch args back out + args = [*ctx.protected_args, *ctx.args] + ctx.args = [] + ctx.protected_args = [] + + # If we're not in chain mode, we only allow the invocation of a + # single command but we also inform the current context about the + # name of the command to invoke. + if not self.chain: + # Make sure the context is entered so we do not clean up + # resources until the result processor has worked. + with ctx: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + ctx.invoked_subcommand = cmd_name + super().invoke(ctx) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) + with sub_ctx: + return _process_result(sub_ctx.command.invoke(sub_ctx)) + + # In chain mode we create the contexts step by step, but after the + # base command has been invoked. Because at that point we do not + # know the subcommands yet, the invoked subcommand attribute is + # set to ``*`` to inform the command that subcommands are executed + # but nothing else. + with ctx: + ctx.invoked_subcommand = "*" if args else None + super().invoke(ctx) + + # Otherwise we make every single context and invoke them in a + # chain. In that case the return value to the result processor + # is the list of all invoked subcommand's results. + contexts = [] + while args: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + sub_ctx = cmd.make_context( + cmd_name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + ) + contexts.append(sub_ctx) + args, sub_ctx.args = sub_ctx.args, [] + + rv = [] + for sub_ctx in contexts: + with sub_ctx: + rv.append(sub_ctx.command.invoke(sub_ctx)) + return _process_result(rv) + + def resolve_command( + self, ctx: Context, args: t.List[str] + ) -> t.Tuple[t.Optional[str], t.Optional[Command], t.List[str]]: + cmd_name = make_str(args[0]) + original_cmd_name = cmd_name + + # Get the command + cmd = self.get_command(ctx, cmd_name) + + # If we can't find the command but there is a normalization + # function available, we try with that one. 
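+        # For example (illustrative): with token_normalize_func=str.lower,
+        # a user-typed "SYNC" is normalized to "sync" and looked up again,
+        # so commands registered under lowercase names still match.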
+ if cmd is None and ctx.token_normalize_func is not None: + cmd_name = ctx.token_normalize_func(cmd_name) + cmd = self.get_command(ctx, cmd_name) + + # If we don't find the command we want to show an error message + # to the user that it was not provided. However, there is + # something else we should do: if the first argument looks like + # an option we want to kick off parsing again for arguments to + # resolve things like --help which now should go to the main + # place. + if cmd is None and not ctx.resilient_parsing: + if split_opt(cmd_name)[0]: + self.parse_args(ctx, ctx.args) + ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) + return cmd_name if cmd else None, cmd, args[1:] + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + """Given a context and a command name, this returns a + :class:`Command` object if it exists or returns `None`. + """ + raise NotImplementedError + + def list_commands(self, ctx: Context) -> t.List[str]: + """Returns a list of subcommand names in the order they should + appear. + """ + return [] + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options, subcommands, and chained + multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results = [ + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + ] + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class Group(MultiCommand): + """A group allows a command to have subcommands attached. This is + the most common way to implement nesting in Click. + + :param name: The name of the group command. + :param commands: A dict mapping names to :class:`Command` objects. + Can also be a list of :class:`Command`, which will use + :attr:`Command.name` to create the dict. + :param attrs: Other command arguments described in + :class:`MultiCommand`, :class:`Command`, and + :class:`BaseCommand`. + + .. versionchanged:: 8.0 + The ``commmands`` argument can be a list of command objects. + """ + + #: If set, this is used by the group's :meth:`command` decorator + #: as the default :class:`Command` class. This is useful to make all + #: subcommands use a custom command class. + #: + #: .. versionadded:: 8.0 + command_class: t.Optional[t.Type[Command]] = None + + #: If set, this is used by the group's :meth:`group` decorator + #: as the default :class:`Group` class. This is useful to make all + #: subgroups use a custom group class. + #: + #: If set to the special value :class:`type` (literally + #: ``group_class = type``), this group's class will be used as the + #: default class. This makes a custom group class continue to make + #: custom groups. + #: + #: .. 
versionadded:: 8.0 + group_class: t.Optional[t.Union[t.Type["Group"], t.Type[type]]] = None + # Literal[type] isn't valid, so use Type[type] + + def __init__( + self, + name: t.Optional[str] = None, + commands: t.Optional[t.Union[t.Dict[str, Command], t.Sequence[Command]]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if commands is None: + commands = {} + elif isinstance(commands, abc.Sequence): + commands = {c.name: c for c in commands if c.name is not None} + + #: The registered subcommands by their exported names. + self.commands: t.Dict[str, Command] = commands + + def add_command(self, cmd: Command, name: t.Optional[str] = None) -> None: + """Registers another :class:`Command` with this group. If the name + is not provided, the name of the command is used. + """ + name = name or cmd.name + if name is None: + raise TypeError("Command has no name.") + _check_multicommand(self, name, cmd, register=True) + self.commands[name] = cmd + + @t.overload + def command(self, __func: t.Callable[..., t.Any]) -> Command: + ... + + @t.overload + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Command]: + ... + + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], Command], Command]: + """A shortcut decorator for declaring and attaching a command to + the group. This takes the same arguments as :func:`command` and + immediately registers the created command with this group by + calling :meth:`add_command`. + + To customize the command class used, set the + :attr:`command_class` attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`command_class` attribute. + """ + from .decorators import command + + if self.command_class and kwargs.get("cls") is None: + kwargs["cls"] = self.command_class + + func: t.Optional[t.Callable] = None + + if args and callable(args[0]): + assert ( + len(args) == 1 and not kwargs + ), "Use 'command(**kwargs)(callable)' to provide arguments." + (func,) = args + args = () + + def decorator(f: t.Callable[..., t.Any]) -> Command: + cmd: Command = command(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + @t.overload + def group(self, __func: t.Callable[..., t.Any]) -> "Group": + ... + + @t.overload + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], "Group"]: + ... + + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], "Group"], "Group"]: + """A shortcut decorator for declaring and attaching a group to + the group. This takes the same arguments as :func:`group` and + immediately registers the created group with this group by + calling :meth:`add_command`. + + To customize the group class used, set the :attr:`group_class` + attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`group_class` attribute. + """ + from .decorators import group + + func: t.Optional[t.Callable] = None + + if args and callable(args[0]): + assert ( + len(args) == 1 and not kwargs + ), "Use 'group(**kwargs)(callable)' to provide arguments." 
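+        # Both decorator forms are handled here; illustrative:
+        #
+        #     @cli.group()
+        #     def db(): ...
+        #
+        #     @cli.group
+        #     def cache(): ...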
+ (func,) = args + args = () + + if self.group_class is not None and kwargs.get("cls") is None: + if self.group_class is type: + kwargs["cls"] = type(self) + else: + kwargs["cls"] = self.group_class + + def decorator(f: t.Callable[..., t.Any]) -> "Group": + cmd: Group = group(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + return self.commands.get(cmd_name) + + def list_commands(self, ctx: Context) -> t.List[str]: + return sorted(self.commands) + + +class CommandCollection(MultiCommand): + """A command collection is a multi command that merges multiple multi + commands together into one. This is a straightforward implementation + that accepts a list of different multi commands as sources and + provides all the commands for each of them. + """ + + def __init__( + self, + name: t.Optional[str] = None, + sources: t.Optional[t.List[MultiCommand]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + #: The list of registered multi commands. + self.sources: t.List[MultiCommand] = sources or [] + + def add_source(self, multi_cmd: MultiCommand) -> None: + """Adds a new multi command to the chain dispatcher.""" + self.sources.append(multi_cmd) + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + for source in self.sources: + rv = source.get_command(ctx, cmd_name) + + if rv is not None: + if self.chain: + _check_multicommand(self, cmd_name, rv) + + return rv + + return None + + def list_commands(self, ctx: Context) -> t.List[str]: + rv: t.Set[str] = set() + + for source in self.sources: + rv.update(source.list_commands(ctx)) + + return sorted(rv) + + +def _check_iter(value: t.Any) -> t.Iterator[t.Any]: + """Check if the value is iterable but not a string. Raises a type + error, or return an iterator over the value. + """ + if isinstance(value, str): + raise TypeError + + return iter(value) + + +class Parameter: + r"""A parameter to a command comes in two versions: they are either + :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently + not supported by design as some of the internals for parsing are + intentionally not finalized. + + Some settings are supported by both options and arguments. + + :param param_decls: the parameter declarations for this option or + argument. This is a list of flags or argument + names. + :param type: the type that should be used. Either a :class:`ParamType` + or a Python type. The later is converted into the former + automatically if supported. + :param required: controls if this is optional or not. + :param default: the default value if omitted. This can also be a callable, + in which case it's invoked when the default is needed + without any arguments. + :param callback: A function to further process or validate the value + after type conversion. It is called as ``f(ctx, param, value)`` + and must return the value. It is called for all sources, + including prompts. + :param nargs: the number of arguments to match. If not ``1`` the return + value is a tuple instead of single value. The default for + nargs is ``1`` (except if the type is a tuple, then it's + the arity of the tuple). If ``nargs=-1``, all remaining + parameters are collected. + :param metavar: how the value is represented in the help page. 
+ :param expose_value: if this is `True` then the value is passed onwards + to the command callback and stored on the context, + otherwise it's skipped. + :param is_eager: eager values are processed before non eager ones. This + should not be set for arguments or it will inverse the + order of processing. + :param envvar: a string or list of strings that are environment variables + that should be checked. + :param shell_complete: A function that returns custom shell + completions. Used instead of the param's type completion if + given. Takes ``ctx, param, incomplete`` and must return a list + of :class:`~click.shell_completion.CompletionItem` or a list of + strings. + + .. versionchanged:: 8.0 + ``process_value`` validates required parameters and bounded + ``nargs``, and invokes the parameter callback before returning + the value. This allows the callback to validate prompts. + ``full_process_value`` is removed. + + .. versionchanged:: 8.0 + ``autocompletion`` is renamed to ``shell_complete`` and has new + semantics described above. The old name is deprecated and will + be removed in 8.1, until then it will be wrapped to match the + new requirements. + + .. versionchanged:: 8.0 + For ``multiple=True, nargs>1``, the default must be a list of + tuples. + + .. versionchanged:: 8.0 + Setting a default is no longer required for ``nargs>1``, it will + default to ``None``. ``multiple=True`` or ``nargs=-1`` will + default to ``()``. + + .. versionchanged:: 7.1 + Empty environment variables are ignored rather than taking the + empty string value. This makes it possible for scripts to clear + variables if they can't unset them. + + .. versionchanged:: 2.0 + Changed signature for parameter callback to also be passed the + parameter. The old callback format will still work, but it will + raise a warning to give you a chance to migrate the code easier. + """ + + param_type_name = "parameter" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + required: bool = False, + default: t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]] = None, + callback: t.Optional[t.Callable[[Context, "Parameter", t.Any], t.Any]] = None, + nargs: t.Optional[int] = None, + multiple: bool = False, + metavar: t.Optional[str] = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: t.Optional[t.Union[str, t.Sequence[str]]] = None, + shell_complete: t.Optional[ + t.Callable[ + [Context, "Parameter", str], + t.Union[t.List["CompletionItem"], t.List[str]], + ] + ] = None, + ) -> None: + self.name, self.opts, self.secondary_opts = self._parse_decls( + param_decls or (), expose_value + ) + self.type = types.convert_type(type, default) + + # Default nargs to what the type tells us if we have that + # information available. + if nargs is None: + if self.type.is_composite: + nargs = self.type.arity + else: + nargs = 1 + + self.required = required + self.callback = callback + self.nargs = nargs + self.multiple = multiple + self.expose_value = expose_value + self.default = default + self.is_eager = is_eager + self.metavar = metavar + self.envvar = envvar + self._custom_shell_complete = shell_complete + + if __debug__: + if self.type.is_composite and nargs != self.type.arity: + raise ValueError( + f"'nargs' must be {self.type.arity} (or None) for" + f" type {self.type!r}, but it was {nargs}." + ) + + # Skip no default or callable default. 
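+            # Editorial sketch, not upstream code: these checks validate the
+            # *shape* of a static default. For a hypothetical option with
+            # nargs=2 and multiple=True the default must be a list of
+            # 2-tuples:
+            #
+            #     @click.option("--point", nargs=2, multiple=True,
+            #                   default=[(0, 0), (1, 1)])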
+ check_default = default if not callable(default) else None + + if check_default is not None: + if multiple: + try: + # Only check the first value against nargs. + check_default = next(_check_iter(check_default), None) + except TypeError: + raise ValueError( + "'default' must be a list when 'multiple' is true." + ) from None + + # Can be None for multiple with empty default. + if nargs != 1 and check_default is not None: + try: + _check_iter(check_default) + except TypeError: + if multiple: + message = ( + "'default' must be a list of lists when 'multiple' is" + " true and 'nargs' != 1." + ) + else: + message = "'default' must be a list when 'nargs' != 1." + + raise ValueError(message) from None + + if nargs > 1 and len(check_default) != nargs: + subject = "item length" if multiple else "length" + raise ValueError( + f"'default' {subject} must match nargs={nargs}." + ) + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + return { + "name": self.name, + "param_type_name": self.param_type_name, + "opts": self.opts, + "secondary_opts": self.secondary_opts, + "type": self.type.to_info_dict(), + "required": self.required, + "nargs": self.nargs, + "multiple": self.multiple, + "default": self.default, + "envvar": self.envvar, + } + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + raise NotImplementedError() + + @property + def human_readable_name(self) -> str: + """Returns the human readable name of this parameter. This is the + same as the name for options, but the metavar for arguments. + """ + return self.name # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + + metavar = self.type.get_metavar(self) + + if metavar is None: + metavar = self.type.name.upper() + + if self.nargs != 1: + metavar += "..." + + return metavar + + @t.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + """Get the default for the parameter. Tries + :meth:`Context.lookup_default` first, then the local default. + + :param ctx: Current context. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0.2 + Type casting is no longer performed when getting a default. + + .. versionchanged:: 8.0.1 + Type casting can fail in resilient parsing mode. Invalid + defaults will not prevent showing help text. + + .. versionchanged:: 8.0 + Looks at ``ctx.default_map`` first. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. 
+ """ + value = ctx.lookup_default(self.name, call=False) # type: ignore + + if value is None: + value = self.default + + if call and callable(value): + value = value() + + return value + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + raise NotImplementedError() + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, t.Any] + ) -> t.Tuple[t.Any, ParameterSource]: + value = opts.get(self.name) # type: ignore + source = ParameterSource.COMMANDLINE + + if value is None: + value = self.value_from_envvar(ctx) + source = ParameterSource.ENVIRONMENT + + if value is None: + value = ctx.lookup_default(self.name) # type: ignore + source = ParameterSource.DEFAULT_MAP + + if value is None: + value = self.get_default(ctx) + source = ParameterSource.DEFAULT + + return value, source + + def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: + """Convert and validate a value against the option's + :attr:`type`, :attr:`multiple`, and :attr:`nargs`. + """ + if value is None: + return () if self.multiple or self.nargs == -1 else None + + def check_iter(value: t.Any) -> t.Iterator: + try: + return _check_iter(value) + except TypeError: + # This should only happen when passing in args manually, + # the parser should construct an iterable when parsing + # the command line. + raise BadParameter( + _("Value must be an iterable."), ctx=ctx, param=self + ) from None + + if self.nargs == 1 or self.type.is_composite: + convert: t.Callable[[t.Any], t.Any] = partial( + self.type, param=self, ctx=ctx + ) + elif self.nargs == -1: + + def convert(value: t.Any) -> t.Tuple: + return tuple(self.type(x, self, ctx) for x in check_iter(value)) + + else: # nargs > 1 + + def convert(value: t.Any) -> t.Tuple: + value = tuple(check_iter(value)) + + if len(value) != self.nargs: + raise BadParameter( + ngettext( + "Takes {nargs} values but 1 was given.", + "Takes {nargs} values but {len} were given.", + len(value), + ).format(nargs=self.nargs, len=len(value)), + ctx=ctx, + param=self, + ) + + return tuple(self.type(x, self, ctx) for x in value) + + if self.multiple: + return tuple(convert(x) for x in check_iter(value)) + + return convert(value) + + def value_is_missing(self, value: t.Any) -> bool: + if value is None: + return True + + if (self.nargs != 1 or self.multiple) and value == (): + return True + + return False + + def process_value(self, ctx: Context, value: t.Any) -> t.Any: + value = self.type_cast_value(ctx, value) + + if self.required and self.value_is_missing(value): + raise MissingParameter(ctx=ctx, param=self) + + if self.callback is not None: + value = self.callback(ctx, self, value) + + return value + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + if self.envvar is None: + return None + + if isinstance(self.envvar, str): + rv = os.environ.get(self.envvar) + + if rv: + return rv + else: + for envvar in self.envvar: + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is not None and self.nargs != 1: + rv = self.type.split_envvar_value(rv) + + return rv + + def handle_parse_result( + self, ctx: Context, opts: t.Mapping[str, t.Any], args: t.List[str] + ) -> t.Tuple[t.Any, t.List[str]]: + with augment_usage_errors(ctx, param=self): + value, source = self.consume_value(ctx, opts) + ctx.set_parameter_source(self.name, source) # type: ignore + + try: + value = self.process_value(ctx, value) + except Exception: 
+ if not ctx.resilient_parsing: + raise + + value = None + + if self.expose_value: + ctx.params[self.name] = value # type: ignore + + return value, args + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + pass + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [] + + def get_error_hint(self, ctx: Context) -> str: + """Get a stringified version of the param for use in error messages to + indicate which param caused the error. + """ + hint_list = self.opts or [self.human_readable_name] + return " / ".join(f"'{x}'" for x in hint_list) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. If a + ``shell_complete`` function was given during init, it is used. + Otherwise, the :attr:`type` + :meth:`~click.types.ParamType.shell_complete` function is used. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + if self._custom_shell_complete is not None: + results = self._custom_shell_complete(ctx, self, incomplete) + + if results and isinstance(results[0], str): + from click.shell_completion import CompletionItem + + results = [CompletionItem(c) for c in results] + + return t.cast(t.List["CompletionItem"], results) + + return self.type.shell_complete(ctx, self, incomplete) + + +class Option(Parameter): + """Options are usually optional values on the command line and + have some extra features that arguments don't have. + + All other parameters are passed onwards to the parameter constructor. + + :param show_default: Show the default value for this option in its + help text. Values are not shown by default, unless + :attr:`Context.show_default` is ``True``. If this value is a + string, it shows that string in parentheses instead of the + actual value. This is particularly useful for dynamic options. + For single option boolean flags, the default remains hidden if + its value is ``False``. + :param show_envvar: Controls if an environment variable should be + shown on the help page. Normally, environment variables are not + shown. + :param prompt: If set to ``True`` or a non empty string then the + user will be prompted for input. If set to ``True`` the prompt + will be the option name capitalized. + :param confirmation_prompt: Prompt a second time to confirm the + value if it was prompted for. Can be set to a string instead of + ``True`` to customize the message. + :param prompt_required: If set to ``False``, the user will be + prompted for input only when the option was specified as a flag + without a value. + :param hide_input: If this is ``True`` then the input on the prompt + will be hidden from the user. This is useful for password input. + :param is_flag: forces this option to act as a flag. The default is + auto detection. + :param flag_value: which value should be used for this flag if it's + enabled. This is set to a boolean automatically if + the option string contains a slash to mark two options. + :param multiple: if this is set to `True` then the argument is accepted + multiple times and recorded. This is similar to ``nargs`` + in how it works but supports arbitrary number of + arguments. + :param count: this flag makes an option increment an integer. + :param allow_from_autoenv: if this is enabled then the value of this + parameter will be pulled from an environment + variable in case a prefix is defined on the + context. + :param help: the help string. 
+ :param hidden: hide this option from help outputs. + + .. versionchanged:: 8.1.0 + Help text indentation is cleaned here instead of only in the + ``@option`` decorator. + + .. versionchanged:: 8.1.0 + The ``show_default`` parameter overrides + ``Context.show_default``. + + .. versionchanged:: 8.1.0 + The default of a single option boolean flag is not shown if the + default value is ``False``. + + .. versionchanged:: 8.0.1 + ``type`` is detected from ``flag_value`` if given. + """ + + param_type_name = "option" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + show_default: t.Union[bool, str, None] = None, + prompt: t.Union[bool, str] = False, + confirmation_prompt: t.Union[bool, str] = False, + prompt_required: bool = True, + hide_input: bool = False, + is_flag: t.Optional[bool] = None, + flag_value: t.Optional[t.Any] = None, + multiple: bool = False, + count: bool = False, + allow_from_autoenv: bool = True, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + help: t.Optional[str] = None, + hidden: bool = False, + show_choices: bool = True, + show_envvar: bool = False, + **attrs: t.Any, + ) -> None: + if help: + help = inspect.cleandoc(help) + + default_is_missing = "default" not in attrs + super().__init__(param_decls, type=type, multiple=multiple, **attrs) + + if prompt is True: + if self.name is None: + raise TypeError("'name' is required with 'prompt=True'.") + + prompt_text: t.Optional[str] = self.name.replace("_", " ").capitalize() + elif prompt is False: + prompt_text = None + else: + prompt_text = prompt + + self.prompt = prompt_text + self.confirmation_prompt = confirmation_prompt + self.prompt_required = prompt_required + self.hide_input = hide_input + self.hidden = hidden + + # If prompt is enabled but not required, then the option can be + # used as a flag to indicate using prompt or flag_value. + self._flag_needs_value = self.prompt is not None and not self.prompt_required + + if is_flag is None: + if flag_value is not None: + # Implicitly a flag because flag_value was set. + is_flag = True + elif self._flag_needs_value: + # Not a flag, but when used as a flag it shows a prompt. + is_flag = False + else: + # Implicitly a flag because flag options were given. + is_flag = bool(self.secondary_opts) + elif is_flag is False and not self._flag_needs_value: + # Not a flag, and prompt is not enabled, can be used as a + # flag if flag_value is set. + self._flag_needs_value = flag_value is not None + + if is_flag and default_is_missing and not self.required: + self.default: t.Union[t.Any, t.Callable[[], t.Any]] = False + + if flag_value is None: + flag_value = not self.default + + if is_flag and type is None: + # Re-guess the type from the flag value instead of the + # default. 
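+            # Editorial sketch with a hypothetical option: for
+            #
+            #     @click.option("--two", flag_value=2)
+            #
+            # the INT type is inferred here from the flag value rather
+            # than from the implicit ``default = False``.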
+ self.type = types.convert_type(None, flag_value) + + self.is_flag: bool = is_flag + self.is_bool_flag = is_flag and isinstance(self.type, types.BoolParamType) + self.flag_value: t.Any = flag_value + + # Counting + self.count = count + if count: + if type is None: + self.type = types.IntRange(min=0) + if default_is_missing: + self.default = 0 + + self.allow_from_autoenv = allow_from_autoenv + self.help = help + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + + if __debug__: + if self.nargs == -1: + raise TypeError("nargs=-1 is not supported for options.") + + if self.prompt and self.is_flag and not self.is_bool_flag: + raise TypeError("'prompt' is not valid for non-boolean flag.") + + if not self.is_bool_flag and self.secondary_opts: + raise TypeError("Secondary flag is not valid for non-boolean flag.") + + if self.is_bool_flag and self.hide_input and self.prompt is not None: + raise TypeError( + "'prompt' with 'hide_input' is not valid for boolean flag." + ) + + if self.count: + if self.multiple: + raise TypeError("'count' is not valid with 'multiple'.") + + if self.is_flag: + raise TypeError("'count' is not valid with 'is_flag'.") + + if self.multiple and self.is_flag: + raise TypeError("'multiple' is not valid with 'is_flag', use 'count'.") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + help=self.help, + prompt=self.prompt, + is_flag=self.is_flag, + flag_value=self.flag_value, + count=self.count, + hidden=self.hidden, + ) + return info_dict + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + opts = [] + secondary_opts = [] + name = None + possible_names = [] + + for decl in decls: + if decl.isidentifier(): + if name is not None: + raise TypeError(f"Name '{name}' defined twice") + name = decl + else: + split_char = ";" if decl[:1] == "/" else "/" + if split_char in decl: + first, second = decl.split(split_char, 1) + first = first.rstrip() + if first: + possible_names.append(split_opt(first)) + opts.append(first) + second = second.lstrip() + if second: + secondary_opts.append(second.lstrip()) + if first == second: + raise ValueError( + f"Boolean option {decl!r} cannot use the" + " same flag for true/false." + ) + else: + possible_names.append(split_opt(decl)) + opts.append(decl) + + if name is None and possible_names: + possible_names.sort(key=lambda x: -len(x[0])) # group long options first + name = possible_names[0][1].replace("-", "_").lower() + if not name.isidentifier(): + name = None + + if name is None: + if not expose_value: + return None, opts, secondary_opts + raise TypeError("Could not determine name for option") + + if not opts and not secondary_opts: + raise TypeError( + f"No options defined but a name was passed ({name})." + " Did you mean to declare an argument instead? Did" + f" you mean to pass '--{name}'?" 
+ ) + + return name, opts, secondary_opts + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + if self.multiple: + action = "append" + elif self.count: + action = "count" + else: + action = "store" + + if self.is_flag: + action = f"{action}_const" + + if self.is_bool_flag and self.secondary_opts: + parser.add_option( + obj=self, opts=self.opts, dest=self.name, action=action, const=True + ) + parser.add_option( + obj=self, + opts=self.secondary_opts, + dest=self.name, + action=action, + const=False, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + const=self.flag_value, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + nargs=self.nargs, + ) + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + if self.hidden: + return None + + any_prefix_is_slash = False + + def _write_opts(opts: t.Sequence[str]) -> str: + nonlocal any_prefix_is_slash + + rv, any_slashes = join_options(opts) + + if any_slashes: + any_prefix_is_slash = True + + if not self.is_flag and not self.count: + rv += f" {self.make_metavar()}" + + return rv + + rv = [_write_opts(self.opts)] + + if self.secondary_opts: + rv.append(_write_opts(self.secondary_opts)) + + help = self.help or "" + extra = [] + + if self.show_envvar: + envvar = self.envvar + + if envvar is None: + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + + if envvar is not None: + var_str = ( + envvar + if isinstance(envvar, str) + else ", ".join(str(d) for d in envvar) + ) + extra.append(_("env var: {var}").format(var=var_str)) + + # Temporarily enable resilient parsing to avoid type casting + # failing for the default. Might be possible to extend this to + # help formatting in general. + resilient = ctx.resilient_parsing + ctx.resilient_parsing = True + + try: + default_value = self.get_default(ctx, call=False) + finally: + ctx.resilient_parsing = resilient + + show_default = False + show_default_is_str = False + + if self.show_default is not None: + if isinstance(self.show_default, str): + show_default_is_str = show_default = True + else: + show_default = self.show_default + elif ctx.show_default is not None: + show_default = ctx.show_default + + if show_default_is_str or (show_default and (default_value is not None)): + if show_default_is_str: + default_string = f"({self.show_default})" + elif isinstance(default_value, (list, tuple)): + default_string = ", ".join(str(d) for d in default_value) + elif inspect.isfunction(default_value): + default_string = _("(dynamic)") + elif self.is_bool_flag and self.secondary_opts: + # For boolean flags that have distinct True/False opts, + # use the opt without prefix instead of the value. 
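+                # Editorial note: e.g. for a hypothetical
+                # "--shout/--no-shout" flag with default=False, the help
+                # shows "[default: no-shout]" rather than "[default: False]".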
+ default_string = split_opt( + (self.opts if self.default else self.secondary_opts)[0] + )[1] + elif self.is_bool_flag and not self.secondary_opts and not default_value: + default_string = "" + else: + default_string = str(default_value) + + if default_string: + extra.append(_("default: {default}").format(default=default_string)) + + if ( + isinstance(self.type, types._NumberRangeBase) + # skip count with default range type + and not (self.count and self.type.min == 0 and self.type.max is None) + ): + range_str = self.type._describe_range() + + if range_str: + extra.append(range_str) + + if self.required: + extra.append(_("required")) + + if extra: + extra_str = "; ".join(extra) + help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" + + return ("; " if any_prefix_is_slash else " / ").join(rv), help + + @t.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + # If we're a non boolean flag our default is more complex because + # we need to look at all flags in the same group to figure out + # if we're the default one in which case we return the flag + # value as default. + if self.is_flag and not self.is_bool_flag: + for param in ctx.command.params: + if param.name == self.name and param.default: + return param.flag_value # type: ignore + + return None + + return super().get_default(ctx, call=call) + + def prompt_for_value(self, ctx: Context) -> t.Any: + """This is an alternative flow that can be activated in the full + value processing if a value does not exist. It will prompt the + user until a valid value exists and then returns the processed + value as result. + """ + assert self.prompt is not None + + # Calculate the default before prompting anything to be stable. + default = self.get_default(ctx) + + # If this is a prompt for a flag we need to handle this + # differently. + if self.is_bool_flag: + return confirm(self.prompt, default) + + return prompt( + self.prompt, + default=default, + type=self.type, + hide_input=self.hide_input, + show_choices=self.show_choices, + confirmation_prompt=self.confirmation_prompt, + value_proc=lambda x: self.process_value(ctx, x), + ) + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + rv = super().resolve_envvar_value(ctx) + + if rv is not None: + return rv + + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is None: + return None + + value_depth = (self.nargs != 1) + bool(self.multiple) + + if value_depth > 0: + rv = self.type.split_envvar_value(rv) + + if self.multiple and self.nargs != 1: + rv = batch(rv, self.nargs) + + return rv + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, "Parameter"] + ) -> t.Tuple[t.Any, ParameterSource]: + value, source = super().consume_value(ctx, opts) + + # The parser will emit a sentinel value if the option can be + # given as a flag without a value. This is different from None + # to distinguish from the flag not being given at all. 
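+        # Editorial sketch with a hypothetical option: for
+        #
+        #     @click.option("--name", prompt=True, prompt_required=False)
+        #
+        # passing "--name" without a value leaves _flag_needs_value here,
+        # which triggers the prompt below instead of a usage error.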
+ if value is _flag_needs_value: + if self.prompt is not None and not ctx.resilient_parsing: + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + else: + value = self.flag_value + source = ParameterSource.COMMANDLINE + + elif ( + self.multiple + and value is not None + and any(v is _flag_needs_value for v in value) + ): + value = [self.flag_value if v is _flag_needs_value else v for v in value] + source = ParameterSource.COMMANDLINE + + # The value wasn't set, or used the param's default, prompt if + # prompting is enabled. + elif ( + source in {None, ParameterSource.DEFAULT} + and self.prompt is not None + and (self.required or self.prompt_required) + and not ctx.resilient_parsing + ): + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + + return value, source + + +class Argument(Parameter): + """Arguments are positional parameters to a command. They generally + provide fewer features than options but can have infinite ``nargs`` + and are required by default. + + All parameters are passed onwards to the parameter constructor. + """ + + param_type_name = "argument" + + def __init__( + self, + param_decls: t.Sequence[str], + required: t.Optional[bool] = None, + **attrs: t.Any, + ) -> None: + if required is None: + if attrs.get("default") is not None: + required = False + else: + required = attrs.get("nargs", 1) > 0 + + if "multiple" in attrs: + raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") + + super().__init__(param_decls, required=required, **attrs) + + if __debug__: + if self.default is not None and self.nargs == -1: + raise TypeError("'default' is not supported for nargs=-1.") + + @property + def human_readable_name(self) -> str: + if self.metavar is not None: + return self.metavar + return self.name.upper() # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + var = self.type.get_metavar(self) + if not var: + var = self.name.upper() # type: ignore + if not self.required: + var = f"[{var}]" + if self.nargs != 1: + var += "..." + return var + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + if not decls: + if not expose_value: + return None, [], [] + raise TypeError("Could not determine name for argument") + if len(decls) == 1: + name = arg = decls[0] + name = name.replace("-", "_").lower() + else: + raise TypeError( + "Arguments take exactly one parameter declaration, got" + f" {len(decls)}." 
+ ) + return name, [arg], [] + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [self.make_metavar()] + + def get_error_hint(self, ctx: Context) -> str: + return f"'{self.make_metavar()}'" + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/decorators.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/decorators.py new file mode 100644 index 00000000..28618dc5 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/decorators.py @@ -0,0 +1,497 @@ +import inspect +import types +import typing as t +from functools import update_wrapper +from gettext import gettext as _ + +from .core import Argument +from .core import Command +from .core import Context +from .core import Group +from .core import Option +from .core import Parameter +from .globals import get_current_context +from .utils import echo + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) +FC = t.TypeVar("FC", bound=t.Union[t.Callable[..., t.Any], Command]) + + +def pass_context(f: F) -> F: + """Marks a callback as wanting to receive the current context + object as first argument. + """ + + def new_func(*args, **kwargs): # type: ignore + return f(get_current_context(), *args, **kwargs) + + return update_wrapper(t.cast(F, new_func), f) + + +def pass_obj(f: F) -> F: + """Similar to :func:`pass_context`, but only pass the object on the + context onwards (:attr:`Context.obj`). This is useful if that object + represents the state of a nested system. + """ + + def new_func(*args, **kwargs): # type: ignore + return f(get_current_context().obj, *args, **kwargs) + + return update_wrapper(t.cast(F, new_func), f) + + +def make_pass_decorator( + object_type: t.Type, ensure: bool = False +) -> "t.Callable[[F], F]": + """Given an object type this creates a decorator that will work + similar to :func:`pass_obj` but instead of passing the object of the + current context, it will find the innermost context of type + :func:`object_type`. + + This generates a decorator that works roughly like this:: + + from functools import update_wrapper + + def decorator(f): + @pass_context + def new_func(ctx, *args, **kwargs): + obj = ctx.find_object(object_type) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + :param object_type: the type of the object to pass. + :param ensure: if set to `True`, a new object will be created and + remembered on the context if it's not there yet. + """ + + def decorator(f: F) -> F: + def new_func(*args, **kwargs): # type: ignore + ctx = get_current_context() + + if ensure: + obj = ctx.ensure_object(object_type) + else: + obj = ctx.find_object(object_type) + + if obj is None: + raise RuntimeError( + "Managed to invoke callback without a context" + f" object of type {object_type.__name__!r}" + " existing." + ) + + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(t.cast(F, new_func), f) + + return decorator + + +def pass_meta_key( + key: str, *, doc_description: t.Optional[str] = None +) -> "t.Callable[[F], F]": + """Create a decorator that passes a key from + :attr:`click.Context.meta` as the first argument to the decorated + function. + + :param key: Key in ``Context.meta`` to pass. + :param doc_description: Description of the object being passed, + inserted into the decorator's docstring. 
Defaults to "the 'key' + key from Context.meta". + + .. versionadded:: 8.0 + """ + + def decorator(f: F) -> F: + def new_func(*args, **kwargs): # type: ignore + ctx = get_current_context() + obj = ctx.meta[key] + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(t.cast(F, new_func), f) + + if doc_description is None: + doc_description = f"the {key!r} key from :attr:`click.Context.meta`" + + decorator.__doc__ = ( + f"Decorator that passes {doc_description} as the first argument" + " to the decorated function." + ) + return decorator + + +CmdType = t.TypeVar("CmdType", bound=Command) + + +@t.overload +def command( + __func: t.Callable[..., t.Any], +) -> Command: + ... + + +@t.overload +def command( + name: t.Optional[str] = None, + **attrs: t.Any, +) -> t.Callable[..., Command]: + ... + + +@t.overload +def command( + name: t.Optional[str] = None, + cls: t.Type[CmdType] = ..., + **attrs: t.Any, +) -> t.Callable[..., CmdType]: + ... + + +def command( + name: t.Union[str, t.Callable[..., t.Any], None] = None, + cls: t.Optional[t.Type[Command]] = None, + **attrs: t.Any, +) -> t.Union[Command, t.Callable[..., Command]]: + r"""Creates a new :class:`Command` and uses the decorated function as + callback. This will also automatically attach all decorated + :func:`option`\s and :func:`argument`\s as parameters to the command. + + The name of the command defaults to the name of the function with + underscores replaced by dashes. If you want to change that, you can + pass the intended name as the first argument. + + All keyword arguments are forwarded to the underlying command class. + For the ``params`` argument, any decorated params are appended to + the end of the list. + + Once decorated the function turns into a :class:`Command` instance + that can be invoked as a command line utility or be attached to a + command :class:`Group`. + + :param name: the name of the command. This defaults to the function + name with underscores replaced by dashes. + :param cls: the command class to instantiate. This defaults to + :class:`Command`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.1 + The ``params`` argument can be used. Decorated params are + appended to the end of the list. + """ + + func: t.Optional[t.Callable[..., t.Any]] = None + + if callable(name): + func = name + name = None + assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." + assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." + + if cls is None: + cls = Command + + def decorator(f: t.Callable[..., t.Any]) -> Command: + if isinstance(f, Command): + raise TypeError("Attempted to convert a callback into a command twice.") + + attr_params = attrs.pop("params", None) + params = attr_params if attr_params is not None else [] + + try: + decorator_params = f.__click_params__ # type: ignore + except AttributeError: + pass + else: + del f.__click_params__ # type: ignore + params.extend(reversed(decorator_params)) + + if attrs.get("help") is None: + attrs["help"] = f.__doc__ + + cmd = cls( # type: ignore[misc] + name=name or f.__name__.lower().replace("_", "-"), # type: ignore[arg-type] + callback=f, + params=params, + **attrs, + ) + cmd.__doc__ = f.__doc__ + return cmd + + if func is not None: + return decorator(func) + + return decorator + + +@t.overload +def group( + __func: t.Callable[..., t.Any], +) -> Group: + ... 
+ + +@t.overload +def group( + name: t.Optional[str] = None, + **attrs: t.Any, +) -> t.Callable[[F], Group]: + ... + + +def group( + name: t.Union[str, t.Callable[..., t.Any], None] = None, **attrs: t.Any +) -> t.Union[Group, t.Callable[[F], Group]]: + """Creates a new :class:`Group` with a function as callback. This + works otherwise the same as :func:`command` just that the `cls` + parameter is set to :class:`Group`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + """ + if attrs.get("cls") is None: + attrs["cls"] = Group + + if callable(name): + grp: t.Callable[[F], Group] = t.cast(Group, command(**attrs)) + return grp(name) + + return t.cast(Group, command(name, **attrs)) + + +def _param_memo(f: FC, param: Parameter) -> None: + if isinstance(f, Command): + f.params.append(param) + else: + if not hasattr(f, "__click_params__"): + f.__click_params__ = [] # type: ignore + + f.__click_params__.append(param) # type: ignore + + +def argument(*param_decls: str, **attrs: t.Any) -> t.Callable[[FC], FC]: + """Attaches an argument to the command. All positional arguments are + passed as parameter declarations to :class:`Argument`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Argument` instance manually + and attaching it to the :attr:`Command.params` list. + + :param cls: the argument class to instantiate. This defaults to + :class:`Argument`. + """ + + def decorator(f: FC) -> FC: + ArgumentClass = attrs.pop("cls", None) or Argument + _param_memo(f, ArgumentClass(param_decls, **attrs)) + return f + + return decorator + + +def option(*param_decls: str, **attrs: t.Any) -> t.Callable[[FC], FC]: + """Attaches an option to the command. All positional arguments are + passed as parameter declarations to :class:`Option`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Option` instance manually + and attaching it to the :attr:`Command.params` list. + + :param cls: the option class to instantiate. This defaults to + :class:`Option`. + """ + + def decorator(f: FC) -> FC: + # Issue 926, copy attrs, so pre-defined options can re-use the same cls= + option_attrs = attrs.copy() + OptionClass = option_attrs.pop("cls", None) or Option + _param_memo(f, OptionClass(param_decls, **option_attrs)) + return f + + return decorator + + +def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--yes`` option which shows a prompt before continuing if + not passed. If the prompt is declined, the program will exit. + + :param param_decls: One or more option names. Defaults to the single + value ``"--yes"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value: + ctx.abort() + + if not param_decls: + param_decls = ("--yes",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("callback", callback) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("prompt", "Do you want to continue?") + kwargs.setdefault("help", "Confirm the action without prompting.") + return option(*param_decls, **kwargs) + + +def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--password`` option which prompts for a password, hiding + input and asking to enter the value again for confirmation. + + :param param_decls: One or more option names. Defaults to the single + value ``"--password"``. 
+ :param kwargs: Extra arguments are passed to :func:`option`. + """ + if not param_decls: + param_decls = ("--password",) + + kwargs.setdefault("prompt", True) + kwargs.setdefault("confirmation_prompt", True) + kwargs.setdefault("hide_input", True) + return option(*param_decls, **kwargs) + + +def version_option( + version: t.Optional[str] = None, + *param_decls: str, + package_name: t.Optional[str] = None, + prog_name: t.Optional[str] = None, + message: t.Optional[str] = None, + **kwargs: t.Any, +) -> t.Callable[[FC], FC]: + """Add a ``--version`` option which immediately prints the version + number and exits the program. + + If ``version`` is not provided, Click will try to detect it using + :func:`importlib.metadata.version` to get the version for the + ``package_name``. On Python < 3.8, the ``importlib_metadata`` + backport must be installed. + + If ``package_name`` is not provided, Click will try to detect it by + inspecting the stack frames. This will be used to detect the + version, so it must match the name of the installed package. + + :param version: The version number to show. If not provided, Click + will try to detect it. + :param param_decls: One or more option names. Defaults to the single + value ``"--version"``. + :param package_name: The package name to detect the version from. If + not provided, Click will try to detect it. + :param prog_name: The name of the CLI to show in the message. If not + provided, it will be detected from the command. + :param message: The message to show. The values ``%(prog)s``, + ``%(package)s``, and ``%(version)s`` are available. Defaults to + ``"%(prog)s, version %(version)s"``. + :param kwargs: Extra arguments are passed to :func:`option`. + :raise RuntimeError: ``version`` could not be detected. + + .. versionchanged:: 8.0 + Add the ``package_name`` parameter, and the ``%(package)s`` + value for messages. + + .. versionchanged:: 8.0 + Use :mod:`importlib.metadata` instead of ``pkg_resources``. The + version is detected based on the package name, not the entry + point name. The Python package name must match the installed + package name, or be passed with ``package_name=``. + """ + if message is None: + message = _("%(prog)s, version %(version)s") + + if version is None and package_name is None: + frame = inspect.currentframe() + f_back = frame.f_back if frame is not None else None + f_globals = f_back.f_globals if f_back is not None else None + # break reference cycle + # https://docs.python.org/3/library/inspect.html#the-interpreter-stack + del frame + + if f_globals is not None: + package_name = f_globals.get("__name__") + + if package_name == "__main__": + package_name = f_globals.get("__package__") + + if package_name: + package_name = package_name.partition(".")[0] + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + nonlocal prog_name + nonlocal version + + if prog_name is None: + prog_name = ctx.find_root().info_name + + if version is None and package_name is not None: + metadata: t.Optional[types.ModuleType] + + try: + from importlib import metadata # type: ignore + except ImportError: + # Python < 3.8 + import importlib_metadata as metadata # type: ignore + + try: + version = metadata.version(package_name) # type: ignore + except metadata.PackageNotFoundError: # type: ignore + raise RuntimeError( + f"{package_name!r} is not installed. Try passing" + " 'package_name' instead." 
+ ) from None + + if version is None: + raise RuntimeError( + f"Could not determine the version for {package_name!r} automatically." + ) + + echo( + t.cast(str, message) + % {"prog": prog_name, "package": package_name, "version": version}, + color=ctx.color, + ) + ctx.exit() + + if not param_decls: + param_decls = ("--version",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show the version and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) + + +def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--help`` option which immediately prints the help page + and exits the program. + + This is usually unnecessary, as the ``--help`` option is added to + each command automatically unless ``add_help_option=False`` is + passed. + + :param param_decls: One or more option names. Defaults to the single + value ``"--help"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + if not param_decls: + param_decls = ("--help",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show this message and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/exceptions.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/exceptions.py new file mode 100644 index 00000000..9e20b3eb --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/exceptions.py @@ -0,0 +1,287 @@ +import os +import typing as t +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import get_text_stderr +from .utils import echo + +if t.TYPE_CHECKING: + from .core import Context + from .core import Parameter + + +def _join_param_hints( + param_hint: t.Optional[t.Union[t.Sequence[str], str]] +) -> t.Optional[str]: + if param_hint is not None and not isinstance(param_hint, str): + return " / ".join(repr(x) for x in param_hint) + + return param_hint + + +class ClickException(Exception): + """An exception that Click can handle and show to the user.""" + + #: The exit code for this exception. + exit_code = 1 + + def __init__(self, message: str) -> None: + super().__init__(message) + self.message = message + + def format_message(self) -> str: + return self.message + + def __str__(self) -> str: + return self.message + + def show(self, file: t.Optional[t.IO] = None) -> None: + if file is None: + file = get_text_stderr() + + echo(_("Error: {message}").format(message=self.format_message()), file=file) + + +class UsageError(ClickException): + """An internal exception that signals a usage error. This typically + aborts any further handling. + + :param message: the error message to display. + :param ctx: optionally the context that caused this error. Click will + fill in the context automatically in some situations. 
+ """ + + exit_code = 2 + + def __init__(self, message: str, ctx: t.Optional["Context"] = None) -> None: + super().__init__(message) + self.ctx = ctx + self.cmd = self.ctx.command if self.ctx else None + + def show(self, file: t.Optional[t.IO] = None) -> None: + if file is None: + file = get_text_stderr() + color = None + hint = "" + if ( + self.ctx is not None + and self.ctx.command.get_help_option(self.ctx) is not None + ): + hint = _("Try '{command} {option}' for help.").format( + command=self.ctx.command_path, option=self.ctx.help_option_names[0] + ) + hint = f"{hint}\n" + if self.ctx is not None: + color = self.ctx.color + echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) + echo( + _("Error: {message}").format(message=self.format_message()), + file=file, + color=color, + ) + + +class BadParameter(UsageError): + """An exception that formats out a standardized error message for a + bad parameter. This is useful when thrown from a callback or type as + Click will attach contextual information to it (for instance, which + parameter it is). + + .. versionadded:: 2.0 + + :param param: the parameter object that caused this error. This can + be left out, and Click will attach this info itself + if possible. + :param param_hint: a string that shows up as parameter name. This + can be used as alternative to `param` in cases + where custom validation should happen. If it is + a string it's used as such, if it's a list then + each item is quoted and separated. + """ + + def __init__( + self, + message: str, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + ) -> None: + super().__init__(message, ctx) + self.param = param + self.param_hint = param_hint + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + return _("Invalid value: {message}").format(message=self.message) + + return _("Invalid value for {param_hint}: {message}").format( + param_hint=_join_param_hints(param_hint), message=self.message + ) + + +class MissingParameter(BadParameter): + """Raised if click required an option or argument but it was not + provided when invoking the script. + + .. versionadded:: 4.0 + + :param param_type: a string that indicates the type of the parameter. + The default is to inherit the parameter type from + the given `param`. Valid values are ``'parameter'``, + ``'option'`` or ``'argument'``. + """ + + def __init__( + self, + message: t.Optional[str] = None, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + param_type: t.Optional[str] = None, + ) -> None: + super().__init__(message or "", ctx, param, param_hint) + self.param_type = param_type + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint: t.Optional[str] = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + param_hint = None + + param_hint = _join_param_hints(param_hint) + param_hint = f" {param_hint}" if param_hint else "" + + param_type = self.param_type + if param_type is None and self.param is not None: + param_type = self.param.param_type_name + + msg = self.message + if self.param is not None: + msg_extra = self.param.type.get_missing_message(self.param) + if msg_extra: + if msg: + msg += f". 
{msg_extra}" + else: + msg = msg_extra + + msg = f" {msg}" if msg else "" + + # Translate param_type for known types. + if param_type == "argument": + missing = _("Missing argument") + elif param_type == "option": + missing = _("Missing option") + elif param_type == "parameter": + missing = _("Missing parameter") + else: + missing = _("Missing {param_type}").format(param_type=param_type) + + return f"{missing}{param_hint}.{msg}" + + def __str__(self) -> str: + if not self.message: + param_name = self.param.name if self.param else None + return _("Missing parameter: {param_name}").format(param_name=param_name) + else: + return self.message + + +class NoSuchOption(UsageError): + """Raised if click attempted to handle an option that does not + exist. + + .. versionadded:: 4.0 + """ + + def __init__( + self, + option_name: str, + message: t.Optional[str] = None, + possibilities: t.Optional[t.Sequence[str]] = None, + ctx: t.Optional["Context"] = None, + ) -> None: + if message is None: + message = _("No such option: {name}").format(name=option_name) + + super().__init__(message, ctx) + self.option_name = option_name + self.possibilities = possibilities + + def format_message(self) -> str: + if not self.possibilities: + return self.message + + possibility_str = ", ".join(sorted(self.possibilities)) + suggest = ngettext( + "Did you mean {possibility}?", + "(Possible options: {possibilities})", + len(self.possibilities), + ).format(possibility=possibility_str, possibilities=possibility_str) + return f"{self.message} {suggest}" + + +class BadOptionUsage(UsageError): + """Raised if an option is generally supplied but the use of the option + was incorrect. This is for instance raised if the number of arguments + for an option is not correct. + + .. versionadded:: 4.0 + + :param option_name: the name of the option being used incorrectly. + """ + + def __init__( + self, option_name: str, message: str, ctx: t.Optional["Context"] = None + ) -> None: + super().__init__(message, ctx) + self.option_name = option_name + + +class BadArgumentUsage(UsageError): + """Raised if an argument is generally supplied but the use of the argument + was incorrect. This is for instance raised if the number of values + for an argument is not correct. + + .. versionadded:: 6.0 + """ + + +class FileError(ClickException): + """Raised if a file cannot be opened.""" + + def __init__(self, filename: str, hint: t.Optional[str] = None) -> None: + if hint is None: + hint = _("unknown error") + + super().__init__(hint) + self.ui_filename = os.fsdecode(filename) + self.filename = filename + + def format_message(self) -> str: + return _("Could not open file {filename!r}: {message}").format( + filename=self.ui_filename, message=self.message + ) + + +class Abort(RuntimeError): + """An internal signalling exception that signals Click to abort.""" + + +class Exit(RuntimeError): + """An exception that indicates that the application should exit with some + status code. + + :param code: the status code to exit with. 
+ """ + + __slots__ = ("exit_code",) + + def __init__(self, code: int = 0) -> None: + self.exit_code = code diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/formatting.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/formatting.py new file mode 100644 index 00000000..ddd2a2f8 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/formatting.py @@ -0,0 +1,301 @@ +import typing as t +from contextlib import contextmanager +from gettext import gettext as _ + +from ._compat import term_len +from .parser import split_opt + +# Can force a width. This is used by the test system +FORCED_WIDTH: t.Optional[int] = None + + +def measure_table(rows: t.Iterable[t.Tuple[str, str]]) -> t.Tuple[int, ...]: + widths: t.Dict[int, int] = {} + + for row in rows: + for idx, col in enumerate(row): + widths[idx] = max(widths.get(idx, 0), term_len(col)) + + return tuple(y for x, y in sorted(widths.items())) + + +def iter_rows( + rows: t.Iterable[t.Tuple[str, str]], col_count: int +) -> t.Iterator[t.Tuple[str, ...]]: + for row in rows: + yield row + ("",) * (col_count - len(row)) + + +def wrap_text( + text: str, + width: int = 78, + initial_indent: str = "", + subsequent_indent: str = "", + preserve_paragraphs: bool = False, +) -> str: + """A helper function that intelligently wraps text. By default, it + assumes that it operates on a single paragraph of text but if the + `preserve_paragraphs` parameter is provided it will intelligently + handle paragraphs (defined by two empty lines). + + If paragraphs are handled, a paragraph can be prefixed with an empty + line containing the ``\\b`` character (``\\x08``) to indicate that + no rewrapping should happen in that block. + + :param text: the text that should be rewrapped. + :param width: the maximum width for the text. + :param initial_indent: the initial indent that should be placed on the + first line as a string. + :param subsequent_indent: the indent string that should be placed on + each consecutive line. + :param preserve_paragraphs: if this flag is set then the wrapping will + intelligently handle paragraphs. + """ + from ._textwrap import TextWrapper + + text = text.expandtabs() + wrapper = TextWrapper( + width, + initial_indent=initial_indent, + subsequent_indent=subsequent_indent, + replace_whitespace=False, + ) + if not preserve_paragraphs: + return wrapper.fill(text) + + p: t.List[t.Tuple[int, bool, str]] = [] + buf: t.List[str] = [] + indent = None + + def _flush_par() -> None: + if not buf: + return + if buf[0].strip() == "\b": + p.append((indent or 0, True, "\n".join(buf[1:]))) + else: + p.append((indent or 0, False, " ".join(buf))) + del buf[:] + + for line in text.splitlines(): + if not line: + _flush_par() + indent = None + else: + if indent is None: + orig_len = term_len(line) + line = line.lstrip() + indent = orig_len - term_len(line) + buf.append(line) + _flush_par() + + rv = [] + for indent, raw, text in p: + with wrapper.extra_indent(" " * indent): + if raw: + rv.append(wrapper.indent_only(text)) + else: + rv.append(wrapper.fill(text)) + + return "\n\n".join(rv) + + +class HelpFormatter: + """This class helps with formatting text-based help pages. It's + usually just needed for very special internal cases, but it's also + exposed so that developers can write their own fancy outputs. + + At present, it always writes into memory. + + :param indent_increment: the additional increment for each level. + :param width: the width for the text. 
This defaults to the terminal + width clamped to a maximum of 78. + """ + + def __init__( + self, + indent_increment: int = 2, + width: t.Optional[int] = None, + max_width: t.Optional[int] = None, + ) -> None: + import shutil + + self.indent_increment = indent_increment + if max_width is None: + max_width = 80 + if width is None: + width = FORCED_WIDTH + if width is None: + width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) + self.width = width + self.current_indent = 0 + self.buffer: t.List[str] = [] + + def write(self, string: str) -> None: + """Writes a unicode string into the internal buffer.""" + self.buffer.append(string) + + def indent(self) -> None: + """Increases the indentation.""" + self.current_indent += self.indent_increment + + def dedent(self) -> None: + """Decreases the indentation.""" + self.current_indent -= self.indent_increment + + def write_usage( + self, prog: str, args: str = "", prefix: t.Optional[str] = None + ) -> None: + """Writes a usage line into the buffer. + + :param prog: the program name. + :param args: whitespace separated list of arguments. + :param prefix: The prefix for the first line. Defaults to + ``"Usage: "``. + """ + if prefix is None: + prefix = f"{_('Usage:')} " + + usage_prefix = f"{prefix:>{self.current_indent}}{prog} " + text_width = self.width - self.current_indent + + if text_width >= (term_len(usage_prefix) + 20): + # The arguments will fit to the right of the prefix. + indent = " " * term_len(usage_prefix) + self.write( + wrap_text( + args, + text_width, + initial_indent=usage_prefix, + subsequent_indent=indent, + ) + ) + else: + # The prefix is too long, put the arguments on the next line. + self.write(usage_prefix) + self.write("\n") + indent = " " * (max(self.current_indent, term_len(prefix)) + 4) + self.write( + wrap_text( + args, text_width, initial_indent=indent, subsequent_indent=indent + ) + ) + + self.write("\n") + + def write_heading(self, heading: str) -> None: + """Writes a heading into the buffer.""" + self.write(f"{'':>{self.current_indent}}{heading}:\n") + + def write_paragraph(self) -> None: + """Writes a paragraph into the buffer.""" + if self.buffer: + self.write("\n") + + def write_text(self, text: str) -> None: + """Writes re-indented text into the buffer. This rewraps and + preserves paragraphs. + """ + indent = " " * self.current_indent + self.write( + wrap_text( + text, + self.width, + initial_indent=indent, + subsequent_indent=indent, + preserve_paragraphs=True, + ) + ) + self.write("\n") + + def write_dl( + self, + rows: t.Sequence[t.Tuple[str, str]], + col_max: int = 30, + col_spacing: int = 2, + ) -> None: + """Writes a definition list into the buffer. This is how options + and commands are usually formatted. + + :param rows: a list of two item tuples for the terms and values. + :param col_max: the maximum width of the first column. + :param col_spacing: the number of spaces between the first and + second column. 
+ """ + rows = list(rows) + widths = measure_table(rows) + if len(widths) != 2: + raise TypeError("Expected two columns for definition list") + + first_col = min(widths[0], col_max) + col_spacing + + for first, second in iter_rows(rows, len(widths)): + self.write(f"{'':>{self.current_indent}}{first}") + if not second: + self.write("\n") + continue + if term_len(first) <= first_col - col_spacing: + self.write(" " * (first_col - term_len(first))) + else: + self.write("\n") + self.write(" " * (first_col + self.current_indent)) + + text_width = max(self.width - first_col - 2, 10) + wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) + lines = wrapped_text.splitlines() + + if lines: + self.write(f"{lines[0]}\n") + + for line in lines[1:]: + self.write(f"{'':>{first_col + self.current_indent}}{line}\n") + else: + self.write("\n") + + @contextmanager + def section(self, name: str) -> t.Iterator[None]: + """Helpful context manager that writes a paragraph, a heading, + and the indents. + + :param name: the section name that is written as heading. + """ + self.write_paragraph() + self.write_heading(name) + self.indent() + try: + yield + finally: + self.dedent() + + @contextmanager + def indentation(self) -> t.Iterator[None]: + """A context manager that increases the indentation.""" + self.indent() + try: + yield + finally: + self.dedent() + + def getvalue(self) -> str: + """Returns the buffer contents.""" + return "".join(self.buffer) + + +def join_options(options: t.Sequence[str]) -> t.Tuple[str, bool]: + """Given a list of option strings this joins them in the most appropriate + way and returns them in the form ``(formatted_string, + any_prefix_is_slash)`` where the second item in the tuple is a flag that + indicates if any of the option prefixes was a slash. + """ + rv = [] + any_prefix_is_slash = False + + for opt in options: + prefix = split_opt(opt)[0] + + if prefix == "/": + any_prefix_is_slash = True + + rv.append((len(prefix), opt)) + + rv.sort(key=lambda x: x[0]) + return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/globals.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/globals.py new file mode 100644 index 00000000..480058f1 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/globals.py @@ -0,0 +1,68 @@ +import typing as t +from threading import local + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Context + +_local = local() + + +@t.overload +def get_current_context(silent: "te.Literal[False]" = False) -> "Context": + ... + + +@t.overload +def get_current_context(silent: bool = ...) -> t.Optional["Context"]: + ... + + +def get_current_context(silent: bool = False) -> t.Optional["Context"]: + """Returns the current click context. This can be used as a way to + access the current context object from anywhere. This is a more implicit + alternative to the :func:`pass_context` decorator. This function is + primarily useful for helpers such as :func:`echo` which might be + interested in changing its behavior based on the current context. + + To push the current context, :meth:`Context.scope` can be used. + + .. versionadded:: 5.0 + + :param silent: if set to `True` the return value is `None` if no context + is available. The default behavior is to raise a + :exc:`RuntimeError`. 
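+
+    A small usage sketch, using the re-export from the top-level
+    ``click`` package::
+
+        import click
+
+        @click.command()
+        def report():
+            ctx = click.get_current_context()
+            click.echo(ctx.info_name)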
+ """ + try: + return t.cast("Context", _local.stack[-1]) + except (AttributeError, IndexError) as e: + if not silent: + raise RuntimeError("There is no active click context.") from e + + return None + + +def push_context(ctx: "Context") -> None: + """Pushes a new context to the current stack.""" + _local.__dict__.setdefault("stack", []).append(ctx) + + +def pop_context() -> None: + """Removes the top level from the stack.""" + _local.stack.pop() + + +def resolve_color_default(color: t.Optional[bool] = None) -> t.Optional[bool]: + """Internal helper to get the default value of the color flag. If a + value is passed it's returned unchanged, otherwise it's looked up from + the current context. + """ + if color is not None: + return color + + ctx = get_current_context(silent=True) + + if ctx is not None: + return ctx.color + + return None diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/parser.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/parser.py new file mode 100644 index 00000000..2d5a2ed7 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/parser.py @@ -0,0 +1,529 @@ +""" +This module started out as largely a copy paste from the stdlib's +optparse module with the features removed that we do not need from +optparse because we implement them in Click on a higher level (for +instance type handling, help formatting and a lot more). + +The plan is to remove more and more from here over time. + +The reason this is a different module and not optparse from the stdlib +is that there are differences in 2.x and 3.x about the error messages +generated and optparse in the stdlib uses gettext for no good reason +and might cause us issues. + +Click uses parts of optparse written by Gregory P. Ward and maintained +by the Python Software Foundation. This is limited to code in parser.py. + +Copyright 2001-2006 Gregory P. Ward. All rights reserved. +Copyright 2002-2006 Python Software Foundation. All rights reserved. +""" +# This code uses parts of optparse written by Gregory P. Ward and +# maintained by the Python Software Foundation. +# Copyright 2001-2006 Gregory P. Ward +# Copyright 2002-2006 Python Software Foundation +import typing as t +from collections import deque +from gettext import gettext as _ +from gettext import ngettext + +from .exceptions import BadArgumentUsage +from .exceptions import BadOptionUsage +from .exceptions import NoSuchOption +from .exceptions import UsageError + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Argument as CoreArgument + from .core import Context + from .core import Option as CoreOption + from .core import Parameter as CoreParameter + +V = t.TypeVar("V") + +# Sentinel value that indicates an option was passed as a flag without a +# value but is not a flag option. Option.consume_value uses this to +# prompt or use the flag_value. +_flag_needs_value = object() + + +def _unpack_args( + args: t.Sequence[str], nargs_spec: t.Sequence[int] +) -> t.Tuple[t.Sequence[t.Union[str, t.Sequence[t.Optional[str]], None]], t.List[str]]: + """Given an iterable of arguments and an iterable of nargs specifications, + it returns a tuple with all the unpacked arguments at the first index + and all remaining arguments as the second. + + The nargs specification is the number of arguments that should be consumed + or `-1` to indicate that this position should eat up all the remainders. + + Missing items are filled with `None`. 
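+
+    For example, one fixed argument followed by a wildcard::
+
+        _unpack_args(["a", "b", "c"], [1, -1])
+        # -> (("a", ("b", "c")), [])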
+ """ + args = deque(args) + nargs_spec = deque(nargs_spec) + rv: t.List[t.Union[str, t.Tuple[t.Optional[str], ...], None]] = [] + spos: t.Optional[int] = None + + def _fetch(c: "te.Deque[V]") -> t.Optional[V]: + try: + if spos is None: + return c.popleft() + else: + return c.pop() + except IndexError: + return None + + while nargs_spec: + nargs = _fetch(nargs_spec) + + if nargs is None: + continue + + if nargs == 1: + rv.append(_fetch(args)) + elif nargs > 1: + x = [_fetch(args) for _ in range(nargs)] + + # If we're reversed, we're pulling in the arguments in reverse, + # so we need to turn them around. + if spos is not None: + x.reverse() + + rv.append(tuple(x)) + elif nargs < 0: + if spos is not None: + raise TypeError("Cannot have two nargs < 0") + + spos = len(rv) + rv.append(None) + + # spos is the position of the wildcard (star). If it's not `None`, + # we fill it with the remainder. + if spos is not None: + rv[spos] = tuple(args) + args = [] + rv[spos + 1 :] = reversed(rv[spos + 1 :]) + + return tuple(rv), list(args) + + +def split_opt(opt: str) -> t.Tuple[str, str]: + first = opt[:1] + if first.isalnum(): + return "", opt + if opt[1:2] == first: + return opt[:2], opt[2:] + return first, opt[1:] + + +def normalize_opt(opt: str, ctx: t.Optional["Context"]) -> str: + if ctx is None or ctx.token_normalize_func is None: + return opt + prefix, opt = split_opt(opt) + return f"{prefix}{ctx.token_normalize_func(opt)}" + + +def split_arg_string(string: str) -> t.List[str]: + """Split an argument string as with :func:`shlex.split`, but don't + fail if the string is incomplete. Ignores a missing closing quote or + incomplete escape sequence and uses the partial token as-is. + + .. code-block:: python + + split_arg_string("example 'my file") + ["example", "my file"] + + split_arg_string("example my\\") + ["example", "my"] + + :param string: String to split. + """ + import shlex + + lex = shlex.shlex(string, posix=True) + lex.whitespace_split = True + lex.commenters = "" + out = [] + + try: + for token in lex: + out.append(token) + except ValueError: + # Raised when end-of-string is reached in an invalid state. Use + # the partial token as-is. The quote or escape character is in + # lex.state, not lex.token. 
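+        # For example, split_arg_string("example 'my file") lands here
+        # with lex.token == "my file", giving ["example", "my file"].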
+ out.append(lex.token) + + return out + + +class Option: + def __init__( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ): + self._short_opts = [] + self._long_opts = [] + self.prefixes = set() + + for opt in opts: + prefix, value = split_opt(opt) + if not prefix: + raise ValueError(f"Invalid start character for option ({opt})") + self.prefixes.add(prefix[0]) + if len(prefix) == 1 and len(value) == 1: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + self.prefixes.add(prefix) + + if action is None: + action = "store" + + self.dest = dest + self.action = action + self.nargs = nargs + self.const = const + self.obj = obj + + @property + def takes_value(self) -> bool: + return self.action in ("store", "append") + + def process(self, value: str, state: "ParsingState") -> None: + if self.action == "store": + state.opts[self.dest] = value # type: ignore + elif self.action == "store_const": + state.opts[self.dest] = self.const # type: ignore + elif self.action == "append": + state.opts.setdefault(self.dest, []).append(value) # type: ignore + elif self.action == "append_const": + state.opts.setdefault(self.dest, []).append(self.const) # type: ignore + elif self.action == "count": + state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore + else: + raise ValueError(f"unknown action '{self.action}'") + state.order.append(self.obj) + + +class Argument: + def __init__(self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1): + self.dest = dest + self.nargs = nargs + self.obj = obj + + def process( + self, + value: t.Union[t.Optional[str], t.Sequence[t.Optional[str]]], + state: "ParsingState", + ) -> None: + if self.nargs > 1: + assert value is not None + holes = sum(1 for x in value if x is None) + if holes == len(value): + value = None + elif holes != 0: + raise BadArgumentUsage( + _("Argument {name!r} takes {nargs} values.").format( + name=self.dest, nargs=self.nargs + ) + ) + + if self.nargs == -1 and self.obj.envvar is not None and value == (): + # Replace empty tuple with None so that a value from the + # environment may be tried. + value = None + + state.opts[self.dest] = value # type: ignore + state.order.append(self.obj) + + +class ParsingState: + def __init__(self, rargs: t.List[str]) -> None: + self.opts: t.Dict[str, t.Any] = {} + self.largs: t.List[str] = [] + self.rargs = rargs + self.order: t.List["CoreParameter"] = [] + + +class OptionParser: + """The option parser is an internal class that is ultimately used to + parse options and arguments. It's modelled after optparse and brings + a similar but vastly simplified API. It should generally not be used + directly as the high level Click classes wrap it for you. + + It's not nearly as extensible as optparse or argparse as it does not + implement features that are implemented on a higher level (such as + types or defaults). + + :param ctx: optionally the :class:`~click.Context` where this parser + should go with. + """ + + def __init__(self, ctx: t.Optional["Context"] = None) -> None: + #: The :class:`~click.Context` for this parser. This might be + #: `None` for some advanced use cases. + self.ctx = ctx + #: This controls how the parser deals with interspersed arguments. + #: If this is set to `False`, the parser will stop on the first + #: non-option. Click uses this to implement nested subcommands + #: safely. 
+ self.allow_interspersed_args = True + #: This tells the parser how to deal with unknown options. By + #: default it will error out (which is sensible), but there is a + #: second mode where it will ignore it and continue processing + #: after shifting all the unknown options into the resulting args. + self.ignore_unknown_options = False + + if ctx is not None: + self.allow_interspersed_args = ctx.allow_interspersed_args + self.ignore_unknown_options = ctx.ignore_unknown_options + + self._short_opt: t.Dict[str, Option] = {} + self._long_opt: t.Dict[str, Option] = {} + self._opt_prefixes = {"-", "--"} + self._args: t.List[Argument] = [] + + def add_option( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ) -> None: + """Adds a new option named `dest` to the parser. The destination + is not inferred (unlike with optparse) and needs to be explicitly + provided. Action can be any of ``store``, ``store_const``, + ``append``, ``append_const`` or ``count``. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + opts = [normalize_opt(opt, self.ctx) for opt in opts] + option = Option(obj, opts, dest, action=action, nargs=nargs, const=const) + self._opt_prefixes.update(option.prefixes) + for opt in option._short_opts: + self._short_opt[opt] = option + for opt in option._long_opts: + self._long_opt[opt] = option + + def add_argument( + self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1 + ) -> None: + """Adds a positional argument named `dest` to the parser. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + self._args.append(Argument(obj, dest=dest, nargs=nargs)) + + def parse_args( + self, args: t.List[str] + ) -> t.Tuple[t.Dict[str, t.Any], t.List[str], t.List["CoreParameter"]]: + """Parses positional arguments and returns ``(values, args, order)`` + for the parsed options and arguments as well as the leftover + arguments if there are any. The order is a list of objects as they + appear on the command line. If arguments appear multiple times they + will be memorized multiple times as well. + """ + state = ParsingState(args) + try: + self._process_args_for_options(state) + self._process_args_for_args(state) + except UsageError: + if self.ctx is None or not self.ctx.resilient_parsing: + raise + return state.opts, state.largs, state.order + + def _process_args_for_args(self, state: ParsingState) -> None: + pargs, args = _unpack_args( + state.largs + state.rargs, [x.nargs for x in self._args] + ) + + for idx, arg in enumerate(self._args): + arg.process(pargs[idx], state) + + state.largs = args + state.rargs = [] + + def _process_args_for_options(self, state: ParsingState) -> None: + while state.rargs: + arg = state.rargs.pop(0) + arglen = len(arg) + # Double dashes always handled explicitly regardless of what + # prefixes are valid. + if arg == "--": + return + elif arg[:1] in self._opt_prefixes and arglen > 1: + self._process_opts(arg, state) + elif self.allow_interspersed_args: + state.largs.append(arg) + else: + state.rargs.insert(0, arg) + return + + # Say this is the original argument list: + # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] + # ^ + # (we are about to process arg(i)). 
+ # + # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of + # [arg0, ..., arg(i-1)] (any options and their arguments will have + # been removed from largs). + # + # The while loop will usually consume 1 or more arguments per pass. + # If it consumes 1 (eg. arg is an option that takes no arguments), + # then after _process_arg() is done the situation is: + # + # largs = subset of [arg0, ..., arg(i)] + # rargs = [arg(i+1), ..., arg(N-1)] + # + # If allow_interspersed_args is false, largs will always be + # *empty* -- still a subset of [arg0, ..., arg(i-1)], but + # not a very interesting subset! + + def _match_long_opt( + self, opt: str, explicit_value: t.Optional[str], state: ParsingState + ) -> None: + if opt not in self._long_opt: + from difflib import get_close_matches + + possibilities = get_close_matches(opt, self._long_opt) + raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) + + option = self._long_opt[opt] + if option.takes_value: + # At this point it's safe to modify rargs by injecting the + # explicit value, because no exception is raised in this + # branch. This means that the inserted value will be fully + # consumed. + if explicit_value is not None: + state.rargs.insert(0, explicit_value) + + value = self._get_value_from_state(opt, option, state) + + elif explicit_value is not None: + raise BadOptionUsage( + opt, _("Option {name!r} does not take a value.").format(name=opt) + ) + + else: + value = None + + option.process(value, state) + + def _match_short_opt(self, arg: str, state: ParsingState) -> None: + stop = False + i = 1 + prefix = arg[0] + unknown_options = [] + + for ch in arg[1:]: + opt = normalize_opt(f"{prefix}{ch}", self.ctx) + option = self._short_opt.get(opt) + i += 1 + + if not option: + if self.ignore_unknown_options: + unknown_options.append(ch) + continue + raise NoSuchOption(opt, ctx=self.ctx) + if option.takes_value: + # Any characters left in arg? Pretend they're the + # next arg, and stop consuming characters of arg. + if i < len(arg): + state.rargs.insert(0, arg[i:]) + stop = True + + value = self._get_value_from_state(opt, option, state) + + else: + value = None + + option.process(value, state) + + if stop: + break + + # If we got any unknown options we re-combinate the string of the + # remaining options and re-attach the prefix, then report that + # to the state as new larg. This way there is basic combinatorics + # that can be achieved while still ignoring unknown arguments. + if self.ignore_unknown_options and unknown_options: + state.largs.append(f"{prefix}{''.join(unknown_options)}") + + def _get_value_from_state( + self, option_name: str, option: Option, state: ParsingState + ) -> t.Any: + nargs = option.nargs + + if len(state.rargs) < nargs: + if option.obj._flag_needs_value: + # Option allows omitting the value. + value = _flag_needs_value + else: + raise BadOptionUsage( + option_name, + ngettext( + "Option {name!r} requires an argument.", + "Option {name!r} requires {nargs} arguments.", + nargs, + ).format(name=option_name, nargs=nargs), + ) + elif nargs == 1: + next_rarg = state.rargs[0] + + if ( + option.obj._flag_needs_value + and isinstance(next_rarg, str) + and next_rarg[:1] in self._opt_prefixes + and len(next_rarg) > 1 + ): + # The next arg looks like the start of an option, don't + # use it as the value if omitting the value is allowed. 
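+                # For example, given "--name --verbose" where "--name"
+                # allows omitting its value, "--verbose" is left in
+                # rargs and Option.consume_value later prompts or uses
+                # the flag_value.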
+ value = _flag_needs_value + else: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + return value + + def _process_opts(self, arg: str, state: ParsingState) -> None: + explicit_value = None + # Long option handling happens in two parts. The first part is + # supporting explicitly attached values. In any case, we will try + # to long match the option first. + if "=" in arg: + long_opt, explicit_value = arg.split("=", 1) + else: + long_opt = arg + norm_long_opt = normalize_opt(long_opt, self.ctx) + + # At this point we will match the (assumed) long option through + # the long option matching code. Note that this allows options + # like "-foo" to be matched as long options. + try: + self._match_long_opt(norm_long_opt, explicit_value, state) + except NoSuchOption: + # At this point the long option matching failed, and we need + # to try with short options. However there is a special rule + # which says, that if we have a two character options prefix + # (applies to "--foo" for instance), we do not dispatch to the + # short option code and will instead raise the no option + # error. + if arg[:2] not in self._opt_prefixes: + self._match_short_opt(arg, state) + return + + if not self.ignore_unknown_options: + raise + + state.largs.append(arg) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/py.typed b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/shell_completion.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/shell_completion.py new file mode 100644 index 00000000..c17a8e64 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/shell_completion.py @@ -0,0 +1,580 @@ +import os +import re +import typing as t +from gettext import gettext as _ + +from .core import Argument +from .core import BaseCommand +from .core import Context +from .core import MultiCommand +from .core import Option +from .core import Parameter +from .core import ParameterSource +from .parser import split_arg_string +from .utils import echo + + +def shell_complete( + cli: BaseCommand, + ctx_args: t.Dict[str, t.Any], + prog_name: str, + complete_var: str, + instruction: str, +) -> int: + """Perform shell completion for the given CLI program. + + :param cli: Command being called. + :param ctx_args: Extra arguments to pass to + ``cli.make_context``. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + :param instruction: Value of ``complete_var`` with the completion + instruction and shell, in the form ``instruction_shell``. + :return: Status code to exit with. + """ + shell, _, instruction = instruction.partition("_") + comp_cls = get_completion_class(shell) + + if comp_cls is None: + return 1 + + comp = comp_cls(cli, ctx_args, prog_name, complete_var) + + if instruction == "source": + echo(comp.source()) + return 0 + + if instruction == "complete": + echo(comp.complete()) + return 0 + + return 1 + + +class CompletionItem: + """Represents a completion value and metadata about the value. The + default metadata is ``type`` to indicate special shell handling, + and ``help`` if a shell supports showing a help string next to the + value. + + Arbitrary parameters can be passed when creating the object, and + accessed using ``item.attr``. 
If an attribute wasn't passed, + accessing it returns ``None``. + + :param value: The completion suggestion. + :param type: Tells the shell script to provide special completion + support for the type. Click uses ``"dir"`` and ``"file"``. + :param help: String shown next to the value if supported. + :param kwargs: Arbitrary metadata. The built-in implementations + don't use this, but custom type completions paired with custom + shell support could use it. + """ + + __slots__ = ("value", "type", "help", "_info") + + def __init__( + self, + value: t.Any, + type: str = "plain", + help: t.Optional[str] = None, + **kwargs: t.Any, + ) -> None: + self.value = value + self.type = type + self.help = help + self._info = kwargs + + def __getattr__(self, name: str) -> t.Any: + return self._info.get(name) + + +# Only Bash >= 4.4 has the nosort option. +_SOURCE_BASH = """\ +%(complete_func)s() { + local IFS=$'\\n' + local response + + response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ +%(complete_var)s=bash_complete $1) + + for completion in $response; do + IFS=',' read type value <<< "$completion" + + if [[ $type == 'dir' ]]; then + COMPREPLY=() + compopt -o dirnames + elif [[ $type == 'file' ]]; then + COMPREPLY=() + compopt -o default + elif [[ $type == 'plain' ]]; then + COMPREPLY+=($value) + fi + done + + return 0 +} + +%(complete_func)s_setup() { + complete -o nosort -F %(complete_func)s %(prog_name)s +} + +%(complete_func)s_setup; +""" + +_SOURCE_ZSH = """\ +#compdef %(prog_name)s + +%(complete_func)s() { + local -a completions + local -a completions_with_descriptions + local -a response + (( ! $+commands[%(prog_name)s] )) && return 1 + + response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ +%(complete_var)s=zsh_complete %(prog_name)s)}") + + for type key descr in ${response}; do + if [[ "$type" == "plain" ]]; then + if [[ "$descr" == "_" ]]; then + completions+=("$key") + else + completions_with_descriptions+=("$key":"$descr") + fi + elif [[ "$type" == "dir" ]]; then + _path_files -/ + elif [[ "$type" == "file" ]]; then + _path_files -f + fi + done + + if [ -n "$completions_with_descriptions" ]; then + _describe -V unsorted completions_with_descriptions -U + fi + + if [ -n "$completions" ]; then + compadd -U -V unsorted -a completions + fi +} + +compdef %(complete_func)s %(prog_name)s; +""" + +_SOURCE_FISH = """\ +function %(complete_func)s; + set -l response; + + for value in (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ +COMP_CWORD=(commandline -t) %(prog_name)s); + set response $response $value; + end; + + for completion in $response; + set -l metadata (string split "," $completion); + + if test $metadata[1] = "dir"; + __fish_complete_directories $metadata[2]; + else if test $metadata[1] = "file"; + __fish_complete_path $metadata[2]; + else if test $metadata[1] = "plain"; + echo $metadata[2]; + end; + end; +end; + +complete --no-files --command %(prog_name)s --arguments \ +"(%(complete_func)s)"; +""" + + +class ShellComplete: + """Base class for providing shell completion support. A subclass for + a given shell will override attributes and methods to implement the + completion instructions (``source`` and ``complete``). + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + + .. 
versionadded:: 8.0
+    """
+
+    name: t.ClassVar[str]
+    """Name to register the shell as with :func:`add_completion_class`.
+    This is used in completion instructions (``{name}_source`` and
+    ``{name}_complete``).
+    """
+
+    source_template: t.ClassVar[str]
+    """Completion script template formatted by :meth:`source`. This must
+    be provided by subclasses.
+    """
+
+    def __init__(
+        self,
+        cli: BaseCommand,
+        ctx_args: t.Dict[str, t.Any],
+        prog_name: str,
+        complete_var: str,
+    ) -> None:
+        self.cli = cli
+        self.ctx_args = ctx_args
+        self.prog_name = prog_name
+        self.complete_var = complete_var
+
+    @property
+    def func_name(self) -> str:
+        """The name of the shell function defined by the completion
+        script.
+        """
+        # Pass re.ASCII by keyword: the third positional argument of
+        # re.sub is the replacement count, not the flags.
+        safe_name = re.sub(
+            r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII
+        )
+        return f"_{safe_name}_completion"
+
+    def source_vars(self) -> t.Dict[str, t.Any]:
+        """Vars for formatting :attr:`source_template`.
+
+        By default this provides ``complete_func``, ``complete_var``,
+        and ``prog_name``.
+        """
+        return {
+            "complete_func": self.func_name,
+            "complete_var": self.complete_var,
+            "prog_name": self.prog_name,
+        }
+
+    def source(self) -> str:
+        """Produce the shell script that defines the completion
+        function. By default this ``%``-style formats
+        :attr:`source_template` with the dict returned by
+        :meth:`source_vars`.
+        """
+        return self.source_template % self.source_vars()
+
+    def get_completion_args(self) -> t.Tuple[t.List[str], str]:
+        """Use the env vars defined by the shell script to return a
+        tuple of ``args, incomplete``. This must be implemented by
+        subclasses.
+        """
+        raise NotImplementedError
+
+    def get_completions(
+        self, args: t.List[str], incomplete: str
+    ) -> t.List[CompletionItem]:
+        """Determine the context and last complete command or parameter
+        from the complete args. Call that object's ``shell_complete``
+        method to get the completions for the incomplete value.
+
+        :param args: List of complete args before the incomplete value.
+        :param incomplete: Value being completed. May be empty.
+        """
+        ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args)
+        obj, incomplete = _resolve_incomplete(ctx, args, incomplete)
+        return obj.shell_complete(ctx, incomplete)
+
+    def format_completion(self, item: CompletionItem) -> str:
+        """Format a completion item into the form recognized by the
+        shell script. This must be implemented by subclasses.
+
+        :param item: Completion item to format.
+        """
+        raise NotImplementedError
+
+    def complete(self) -> str:
+        """Produce the completion data to send back to the shell.
+
+        By default this calls :meth:`get_completion_args`, gets the
+        completions, then calls :meth:`format_completion` for each
+        completion.
+        """
+        args, incomplete = self.get_completion_args()
+        completions = self.get_completions(args, incomplete)
+        out = [self.format_completion(item) for item in completions]
+        return "\n".join(out)
+
+
+class BashComplete(ShellComplete):
+    """Shell completion for Bash."""
+
+    name = "bash"
+    source_template = _SOURCE_BASH
+
+    def _check_version(self) -> None:
+        import subprocess
+
+        output = subprocess.run(
+            ["bash", "-c", "echo ${BASH_VERSION}"], stdout=subprocess.PIPE
+        )
+        match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode())
+
+        if match is not None:
+            major, minor = match.groups()
+
+            if major < "4" or major == "4" and minor < "4":
+                raise RuntimeError(
+                    _(
+                        "Shell completion is not supported for Bash"
+                        " versions older than 4.4."
+ ) + ) + else: + raise RuntimeError( + _("Couldn't detect Bash version, shell completion is not supported.") + ) + + def source(self) -> str: + self._check_version() + return super().source() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type},{item.value}" + + +class ZshComplete(ShellComplete): + """Shell completion for Zsh.""" + + name = "zsh" + source_template = _SOURCE_ZSH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" + + +class FishComplete(ShellComplete): + """Shell completion for Fish.""" + + name = "fish" + source_template = _SOURCE_FISH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + incomplete = os.environ["COMP_CWORD"] + args = cwords[1:] + + # Fish stores the partial word in both COMP_WORDS and + # COMP_CWORD, remove it from complete args. + if incomplete and args and args[-1] == incomplete: + args.pop() + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + if item.help: + return f"{item.type},{item.value}\t{item.help}" + + return f"{item.type},{item.value}" + + +_available_shells: t.Dict[str, t.Type[ShellComplete]] = { + "bash": BashComplete, + "fish": FishComplete, + "zsh": ZshComplete, +} + + +def add_completion_class( + cls: t.Type[ShellComplete], name: t.Optional[str] = None +) -> None: + """Register a :class:`ShellComplete` subclass under the given name. + The name will be provided by the completion instruction environment + variable during completion. + + :param cls: The completion class that will handle completion for the + shell. + :param name: Name to register the class under. Defaults to the + class's ``name`` attribute. + """ + if name is None: + name = cls.name + + _available_shells[name] = cls + + +def get_completion_class(shell: str) -> t.Optional[t.Type[ShellComplete]]: + """Look up a registered :class:`ShellComplete` subclass by the name + provided by the completion instruction environment variable. If the + name isn't registered, returns ``None``. + + :param shell: Name the class is registered under. + """ + return _available_shells.get(shell) + + +def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: + """Determine if the given parameter is an argument that can still + accept values. + + :param ctx: Invocation context for the command represented by the + parsed complete args. + :param param: Argument object being checked. 
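+
+    For example, an argument declared with ``nargs=-1`` is always
+    considered incomplete, since a variadic argument can keep
+    accepting values.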
+ """ + if not isinstance(param, Argument): + return False + + assert param.name is not None + value = ctx.params[param.name] + return ( + param.nargs == -1 + or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE + or ( + param.nargs > 1 + and isinstance(value, (tuple, list)) + and len(value) < param.nargs + ) + ) + + +def _start_of_option(ctx: Context, value: str) -> bool: + """Check if the value looks like the start of an option.""" + if not value: + return False + + c = value[0] + return c in ctx._opt_prefixes + + +def _is_incomplete_option(ctx: Context, args: t.List[str], param: Parameter) -> bool: + """Determine if the given parameter is an option that needs a value. + + :param args: List of complete args before the incomplete value. + :param param: Option object being checked. + """ + if not isinstance(param, Option): + return False + + if param.is_flag or param.count: + return False + + last_option = None + + for index, arg in enumerate(reversed(args)): + if index + 1 > param.nargs: + break + + if _start_of_option(ctx, arg): + last_option = arg + + return last_option is not None and last_option in param.opts + + +def _resolve_context( + cli: BaseCommand, ctx_args: t.Dict[str, t.Any], prog_name: str, args: t.List[str] +) -> Context: + """Produce the context hierarchy starting with the command and + traversing the complete arguments. This only follows the commands, + it doesn't trigger input prompts or callbacks. + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param args: List of complete args before the incomplete value. + """ + ctx_args["resilient_parsing"] = True + ctx = cli.make_context(prog_name, args.copy(), **ctx_args) + args = ctx.protected_args + ctx.args + + while args: + command = ctx.command + + if isinstance(command, MultiCommand): + if not command.chain: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + ctx = cmd.make_context(name, args, parent=ctx, resilient_parsing=True) + args = ctx.protected_args + ctx.args + else: + while args: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + sub_ctx = cmd.make_context( + name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + resilient_parsing=True, + ) + args = sub_ctx.args + + ctx = sub_ctx + args = [*sub_ctx.protected_args, *sub_ctx.args] + else: + break + + return ctx + + +def _resolve_incomplete( + ctx: Context, args: t.List[str], incomplete: str +) -> t.Tuple[t.Union[BaseCommand, Parameter], str]: + """Find the Click object that will handle the completion of the + incomplete value. Return the object and the incomplete value. + + :param ctx: Invocation context for the command represented by + the parsed complete args. + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + # Different shells treat an "=" between a long option name and + # value differently. Might keep the value joined, return the "=" + # as a separate item, or return the split name and value. Always + # split and discard the "=" to make completion easier. + if incomplete == "=": + incomplete = "" + elif "=" in incomplete and _start_of_option(ctx, incomplete): + name, _, incomplete = incomplete.partition("=") + args.append(name) + + # The "--" marker tells Click to stop treating values as options + # even if they start with the option character. 
If it hasn't been + # given and the incomplete arg looks like an option, the current + # command will provide option name completions. + if "--" not in args and _start_of_option(ctx, incomplete): + return ctx.command, incomplete + + params = ctx.command.get_params(ctx) + + # If the last complete arg is an option name with an incomplete + # value, the option will provide value completions. + for param in params: + if _is_incomplete_option(ctx, args, param): + return param, incomplete + + # It's not an option name or value. The first argument without a + # parsed value will provide value completions. + for param in params: + if _is_incomplete_argument(ctx, param): + return param, incomplete + + # There were no unparsed arguments, the command may be a group that + # will provide command name completions. + return ctx.command, incomplete diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/termui.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/termui.py new file mode 100644 index 00000000..bfb2f5ae --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/termui.py @@ -0,0 +1,787 @@ +import inspect +import io +import itertools +import os +import sys +import typing as t +from gettext import gettext as _ + +from ._compat import isatty +from ._compat import strip_ansi +from ._compat import WIN +from .exceptions import Abort +from .exceptions import UsageError +from .globals import resolve_color_default +from .types import Choice +from .types import convert_type +from .types import ParamType +from .utils import echo +from .utils import LazyFile + +if t.TYPE_CHECKING: + from ._termui_impl import ProgressBar + +V = t.TypeVar("V") + +# The prompt functions to use. The doc tools currently override these +# functions to customize how they work. +visible_prompt_func: t.Callable[[str], str] = input + +_ansi_colors = { + "black": 30, + "red": 31, + "green": 32, + "yellow": 33, + "blue": 34, + "magenta": 35, + "cyan": 36, + "white": 37, + "reset": 39, + "bright_black": 90, + "bright_red": 91, + "bright_green": 92, + "bright_yellow": 93, + "bright_blue": 94, + "bright_magenta": 95, + "bright_cyan": 96, + "bright_white": 97, +} +_ansi_reset_all = "\033[0m" + + +def hidden_prompt_func(prompt: str) -> str: + import getpass + + return getpass.getpass(prompt) + + +def _build_prompt( + text: str, + suffix: str, + show_default: bool = False, + default: t.Optional[t.Any] = None, + show_choices: bool = True, + type: t.Optional[ParamType] = None, +) -> str: + prompt = text + if type is not None and show_choices and isinstance(type, Choice): + prompt += f" ({', '.join(map(str, type.choices))})" + if default is not None and show_default: + prompt = f"{prompt} [{_format_default(default)}]" + return f"{prompt}{suffix}" + + +def _format_default(default: t.Any) -> t.Any: + if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): + return default.name # type: ignore + + return default + + +def prompt( + text: str, + default: t.Optional[t.Any] = None, + hide_input: bool = False, + confirmation_prompt: t.Union[bool, str] = False, + type: t.Optional[t.Union[ParamType, t.Any]] = None, + value_proc: t.Optional[t.Callable[[str], t.Any]] = None, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, + show_choices: bool = True, +) -> t.Any: + """Prompts a user for input. This is a convenience function that can + be used to prompt a user for input later. 
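+
+    For example::
+
+        name = click.prompt("Your name", default="World")
+        age = click.prompt("Your age", type=int)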
+
+    If the user aborts the input by sending an interrupt signal, this
+    function will catch it and raise a :exc:`Abort` exception.
+
+    :param text: the text to show for the prompt.
+    :param default: the default value to use if no input happens. If this
+        is not given it will prompt until it's aborted.
+    :param hide_input: if this is set to true then the input value will
+        be hidden.
+    :param confirmation_prompt: Prompt a second time to confirm the
+        value. Can be set to a string instead of ``True`` to customize
+        the message.
+    :param type: the type to use to check the value against.
+    :param value_proc: if this parameter is provided it's a function that
+        is invoked instead of the type conversion to
+        convert a value.
+    :param prompt_suffix: a suffix that should be added to the prompt.
+    :param show_default: shows or hides the default value in the prompt.
+    :param err: if set to true the file defaults to ``stderr`` instead of
+        ``stdout``, the same as with echo.
+    :param show_choices: Show or hide choices if the passed type is a Choice.
+        For example if type is a Choice of either day or week,
+        show_choices is true and text is "Group by" then the
+        prompt will be "Group by (day, week): ".
+
+    .. versionadded:: 8.0
+        ``confirmation_prompt`` can be a custom string.
+
+    .. versionadded:: 7.0
+        Added the ``show_choices`` parameter.
+
+    .. versionadded:: 6.0
+        Added unicode support for cmd.exe on Windows.
+
+    .. versionadded:: 4.0
+        Added the `err` parameter.
+
+    """
+
+    def prompt_func(text: str) -> str:
+        f = hidden_prompt_func if hide_input else visible_prompt_func
+        try:
+            # Write the prompt separately so that we get nice
+            # coloring through colorama on Windows
+            echo(text.rstrip(" "), nl=False, err=err)
+            # Echo a space to stdout to work around an issue where
+            # readline causes backspace to clear the whole line.
+            return f(" ")
+        except (KeyboardInterrupt, EOFError):
+            # getpass doesn't print a newline if the user aborts input with ^C.
+            # Allegedly this behavior is inherited from getpass(3).
+            # A doc bug has been filed at https://bugs.python.org/issue24711
+            if hide_input:
+                echo(None, err=err)
+            raise Abort() from None
+
+    if value_proc is None:
+        value_proc = convert_type(type, default)
+
+    prompt = _build_prompt(
+        text, prompt_suffix, show_default, default, show_choices, type
+    )
+
+    if confirmation_prompt:
+        if confirmation_prompt is True:
+            confirmation_prompt = _("Repeat for confirmation")
+
+        confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix)
+
+    while True:
+        while True:
+            value = prompt_func(prompt)
+            if value:
+                break
+            elif default is not None:
+                value = default
+                break
+        try:
+            result = value_proc(value)
+        except UsageError as e:
+            if hide_input:
+                echo(_("Error: The value you entered was invalid."), err=err)
+            else:
+                echo(_("Error: {e.message}").format(e=e), err=err)  # noqa: B306
+            continue
+        if not confirmation_prompt:
+            return result
+        while True:
+            value2 = prompt_func(confirmation_prompt)
+            is_empty = not value and not value2
+            if value2 or is_empty:
+                break
+        if value == value2:
+            return result
+        echo(_("Error: The two entered values do not match."), err=err)
+
+
+def confirm(
+    text: str,
+    default: t.Optional[bool] = False,
+    abort: bool = False,
+    prompt_suffix: str = ": ",
+    show_default: bool = True,
+    err: bool = False,
+) -> bool:
+    """Prompts for confirmation (yes/no question).
+
+    If the user aborts the input by sending an interrupt signal, this
+    function will catch it and raise a :exc:`Abort` exception.
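+
+    For example::
+
+        if click.confirm("Overwrite the file?", default=False):
+            click.echo("Overwriting")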
+ + :param text: the question to ask. + :param default: The default value to use when no input is given. If + ``None``, repeat until input is given. + :param abort: if this is set to `True` a negative answer aborts the + exception by raising :exc:`Abort`. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + + .. versionchanged:: 8.0 + Repeat until input is given if ``default`` is ``None``. + + .. versionadded:: 4.0 + Added the ``err`` parameter. + """ + prompt = _build_prompt( + text, + prompt_suffix, + show_default, + "y/n" if default is None else ("Y/n" if default else "y/N"), + ) + + while True: + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(prompt.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. + value = visible_prompt_func(" ").lower().strip() + except (KeyboardInterrupt, EOFError): + raise Abort() from None + if value in ("y", "yes"): + rv = True + elif value in ("n", "no"): + rv = False + elif default is not None and value == "": + rv = default + else: + echo(_("Error: invalid input"), err=err) + continue + break + if abort and not rv: + raise Abort() + return rv + + +def echo_via_pager( + text_or_generator: t.Union[t.Iterable[str], t.Callable[[], t.Iterable[str]], str], + color: t.Optional[bool] = None, +) -> None: + """This function takes a text and shows it via an environment specific + pager on stdout. + + .. versionchanged:: 3.0 + Added the `color` flag. + + :param text_or_generator: the text to page, or alternatively, a + generator emitting the text to page. + :param color: controls if the pager supports ANSI colors or not. The + default is autodetection. + """ + color = resolve_color_default(color) + + if inspect.isgeneratorfunction(text_or_generator): + i = t.cast(t.Callable[[], t.Iterable[str]], text_or_generator)() + elif isinstance(text_or_generator, str): + i = [text_or_generator] + else: + i = iter(t.cast(t.Iterable[str], text_or_generator)) + + # convert every element of i to a text type if necessary + text_generator = (el if isinstance(el, str) else str(el) for el in i) + + from ._termui_impl import pager + + return pager(itertools.chain(text_generator, "\n"), color) + + +def progressbar( + iterable: t.Optional[t.Iterable[V]] = None, + length: t.Optional[int] = None, + label: t.Optional[str] = None, + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", + width: int = 36, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, +) -> "ProgressBar[V]": + """This function creates an iterable context manager that can be used + to iterate over something while showing a progress bar. It will + either iterate over the `iterable` or `length` items (that are counted + up). While iteration happens, this function will print a rendered + progress bar to the given `file` (defaults to stdout) and will attempt + to calculate remaining time and more. By default, this progress bar + will not be rendered if the file is not a terminal. 
+ + The context manager creates the progress bar. When the context + manager is entered the progress bar is already created. With every + iteration over the progress bar, the iterable passed to the bar is + advanced and the bar is updated. When the context manager exits, + a newline is printed and the progress bar is finalized on screen. + + Note: The progress bar is currently designed for use cases where the + total progress can be expected to take at least several seconds. + Because of this, the ProgressBar class object won't display + progress that is considered too fast, and progress where the time + between steps is less than a second. + + No printing must happen or the progress bar will be unintentionally + destroyed. + + Example usage:: + + with progressbar(items) as bar: + for item in bar: + do_something_with(item) + + Alternatively, if no iterable is specified, one can manually update the + progress bar through the `update()` method instead of directly + iterating over the progress bar. The update method accepts the number + of steps to increment the bar with:: + + with progressbar(length=chunks.total_bytes) as bar: + for chunk in chunks: + process_chunk(chunk) + bar.update(chunks.bytes) + + The ``update()`` method also takes an optional value specifying the + ``current_item`` at the new position. This is useful when used + together with ``item_show_func`` to customize the output for each + manual step:: + + with click.progressbar( + length=total_size, + label='Unzipping archive', + item_show_func=lambda a: a.filename + ) as bar: + for archive in zip_file: + archive.extract() + bar.update(archive.size, archive) + + :param iterable: an iterable to iterate over. If not provided the length + is required. + :param length: the number of items to iterate over. By default the + progressbar will attempt to ask the iterator about its + length, which might or might not work. If an iterable is + also provided this parameter can be used to override the + length. If an iterable is not provided the progress bar + will iterate over a range of that length. + :param label: the label to show next to the progress bar. + :param show_eta: enables or disables the estimated time display. This is + automatically disabled if the length cannot be + determined. + :param show_percent: enables or disables the percentage display. The + default is `True` if the iterable has a length or + `False` if not. + :param show_pos: enables or disables the absolute position display. The + default is `False`. + :param item_show_func: A function called with the current item which + can return a string to show next to the progress bar. If the + function returns ``None`` nothing is shown. The current item can + be ``None``, such as when entering and exiting the bar. + :param fill_char: the character to use to show the filled part of the + progress bar. + :param empty_char: the character to use to show the non-filled part of + the progress bar. + :param bar_template: the format string to use as template for the bar. + The parameters in it are ``label`` for the label, + ``bar`` for the progress bar and ``info`` for the + info section. + :param info_sep: the separator between multiple info items (eta etc.) + :param width: the width of the progress bar in characters, 0 means full + terminal width + :param file: The file to write to. If this is not a terminal then + only the label is printed. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. 
This is only needed if ANSI + codes are included anywhere in the progress bar output + which is not the case by default. + :param update_min_steps: Render only when this many updates have + completed. This allows tuning for very fast iterators. + + .. versionchanged:: 8.0 + Output is shown even if execution time is less than 0.5 seconds. + + .. versionchanged:: 8.0 + ``item_show_func`` shows the current item, not the previous one. + + .. versionchanged:: 8.0 + Labels are echoed if the output is not a TTY. Reverts a change + in 7.0 that removed all output. + + .. versionadded:: 8.0 + Added the ``update_min_steps`` parameter. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. Added the ``update`` method to + the object. + + .. versionadded:: 2.0 + """ + from ._termui_impl import ProgressBar + + color = resolve_color_default(color) + return ProgressBar( + iterable=iterable, + length=length, + show_eta=show_eta, + show_percent=show_percent, + show_pos=show_pos, + item_show_func=item_show_func, + fill_char=fill_char, + empty_char=empty_char, + bar_template=bar_template, + info_sep=info_sep, + file=file, + label=label, + width=width, + color=color, + update_min_steps=update_min_steps, + ) + + +def clear() -> None: + """Clears the terminal screen. This will have the effect of clearing + the whole visible space of the terminal and moving the cursor to the + top left. This does not do anything if not connected to a terminal. + + .. versionadded:: 2.0 + """ + if not isatty(sys.stdout): + return + if WIN: + os.system("cls") + else: + sys.stdout.write("\033[2J\033[1;1H") + + +def _interpret_color( + color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0 +) -> str: + if isinstance(color, int): + return f"{38 + offset};5;{color:d}" + + if isinstance(color, (tuple, list)): + r, g, b = color + return f"{38 + offset};2;{r:d};{g:d};{b:d}" + + return str(_ansi_colors[color] + offset) + + +def style( + text: t.Any, + fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bold: t.Optional[bool] = None, + dim: t.Optional[bool] = None, + underline: t.Optional[bool] = None, + overline: t.Optional[bool] = None, + italic: t.Optional[bool] = None, + blink: t.Optional[bool] = None, + reverse: t.Optional[bool] = None, + strikethrough: t.Optional[bool] = None, + reset: bool = True, +) -> str: + """Styles a text with ANSI styles and returns the new string. By + default the styling is self contained which means that at the end + of the string a reset code is issued. This can be prevented by + passing ``reset=False``. + + Examples:: + + click.echo(click.style('Hello World!', fg='green')) + click.echo(click.style('ATTENTION!', blink=True)) + click.echo(click.style('Some things', reverse=True, fg='cyan')) + click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) + + Supported color names: + + * ``black`` (might be a gray) + * ``red`` + * ``green`` + * ``yellow`` (might be an orange) + * ``blue`` + * ``magenta`` + * ``cyan`` + * ``white`` (might be light gray) + * ``bright_black`` + * ``bright_red`` + * ``bright_green`` + * ``bright_yellow`` + * ``bright_blue`` + * ``bright_magenta`` + * ``bright_cyan`` + * ``bright_white`` + * ``reset`` (reset the color code only) + + If the terminal supports it, color may also be specified as: + + - An integer in the interval [0, 255]. The terminal must support + 8-bit/256-color mode. + - An RGB tuple of three integers in [0, 255]. 
The terminal must + support 24-bit/true-color mode. + + See https://en.wikipedia.org/wiki/ANSI_color and + https://gist.github.com/XVilka/8346728 for more information. + + :param text: the string to style with ansi codes. + :param fg: if provided this will become the foreground color. + :param bg: if provided this will become the background color. + :param bold: if provided this will enable or disable bold mode. + :param dim: if provided this will enable or disable dim mode. This is + badly supported. + :param underline: if provided this will enable or disable underline. + :param overline: if provided this will enable or disable overline. + :param italic: if provided this will enable or disable italic. + :param blink: if provided this will enable or disable blinking. + :param reverse: if provided this will enable or disable inverse + rendering (foreground becomes background and the + other way round). + :param strikethrough: if provided this will enable or disable + striking through text. + :param reset: by default a reset-all code is added at the end of the + string which means that styles do not carry over. This + can be disabled to compose styles. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. + + .. versionchanged:: 8.0 + Added support for 256 and RGB color codes. + + .. versionchanged:: 8.0 + Added the ``strikethrough``, ``italic``, and ``overline`` + parameters. + + .. versionchanged:: 7.0 + Added support for bright colors. + + .. versionadded:: 2.0 + """ + if not isinstance(text, str): + text = str(text) + + bits = [] + + if fg: + try: + bits.append(f"\033[{_interpret_color(fg)}m") + except KeyError: + raise TypeError(f"Unknown color {fg!r}") from None + + if bg: + try: + bits.append(f"\033[{_interpret_color(bg, 10)}m") + except KeyError: + raise TypeError(f"Unknown color {bg!r}") from None + + if bold is not None: + bits.append(f"\033[{1 if bold else 22}m") + if dim is not None: + bits.append(f"\033[{2 if dim else 22}m") + if underline is not None: + bits.append(f"\033[{4 if underline else 24}m") + if overline is not None: + bits.append(f"\033[{53 if overline else 55}m") + if italic is not None: + bits.append(f"\033[{3 if italic else 23}m") + if blink is not None: + bits.append(f"\033[{5 if blink else 25}m") + if reverse is not None: + bits.append(f"\033[{7 if reverse else 27}m") + if strikethrough is not None: + bits.append(f"\033[{9 if strikethrough else 29}m") + bits.append(text) + if reset: + bits.append(_ansi_reset_all) + return "".join(bits) + + +def unstyle(text: str) -> str: + """Removes ANSI styling information from a string. Usually it's not + necessary to use this function as Click's echo function will + automatically remove styling if necessary. + + .. versionadded:: 2.0 + + :param text: the text to remove style information from. + """ + return strip_ansi(text) + + +def secho( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO[t.AnyStr]] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, + **styles: t.Any, +) -> None: + """This function combines :func:`echo` and :func:`style` into one + call. As such the following two calls are the same:: + + click.secho('Hello World!', fg='green') + click.echo(click.style('Hello World!', fg='green')) + + All keyword arguments are forwarded to the underlying functions + depending on which one they go with. + + Non-string types will be converted to :class:`str`. However, + :class:`bytes` are passed directly to :meth:`echo` without applying + style. 
If you want to style bytes that represent text, call + :meth:`bytes.decode` first. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. Bytes are + passed through without style applied. + + .. versionadded:: 2.0 + """ + if message is not None and not isinstance(message, (bytes, bytearray)): + message = style(message, **styles) + + return echo(message, file=file, nl=nl, err=err, color=color) + + +def edit( + text: t.Optional[t.AnyStr] = None, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + filename: t.Optional[str] = None, +) -> t.Optional[t.AnyStr]: + r"""Edits the given text in the defined editor. If an editor is given + (should be the full path to the executable but the regular operating + system search path is used for finding the executable) it overrides + the detected editor. Optionally, some environment variables can be + used. If the editor is closed without changes, `None` is returned. In + case a file is edited directly the return value is always `None` and + `require_save` and `extension` are ignored. + + If the editor cannot be opened a :exc:`UsageError` is raised. + + Note for Windows: to simplify cross-platform usage, the newlines are + automatically converted from POSIX to Windows and vice versa. As such, + the message here will have ``\n`` as newline markers. + + :param text: the text to edit. + :param editor: optionally the editor to use. Defaults to automatic + detection. + :param env: environment variables to forward to the editor. + :param require_save: if this is true, then not saving in the editor + will make the return value become `None`. + :param extension: the extension to tell the editor about. This defaults + to `.txt` but changing this might change syntax + highlighting. + :param filename: if provided it will edit this file instead of the + provided text contents. It will not use a temporary + file as an indirection in that case. + """ + from ._termui_impl import Editor + + ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) + + if filename is None: + return ed.edit(text) + + ed.edit_file(filename) + return None + + +def launch(url: str, wait: bool = False, locate: bool = False) -> int: + """This function launches the given URL (or filename) in the default + viewer application for this file type. If this is an executable, it + might launch the executable in a new session. The return value is + the exit code of the launched application. Usually, ``0`` indicates + success. + + Examples:: + + click.launch('https://click.palletsprojects.com/') + click.launch('/my/downloaded/file', locate=True) + + .. versionadded:: 2.0 + + :param url: URL or filename of the thing to launch. + :param wait: Wait for the program to exit before returning. This + only works if the launched program blocks. In particular, + ``xdg-open`` on Linux does not block. + :param locate: if this is set to `True` then instead of launching the + application associated with the URL it will attempt to + launch a file manager with the file located. This + might have weird effects if the URL does not point to + the filesystem. + """ + from ._termui_impl import open_url + + return open_url(url, wait=wait, locate=locate) + + +# If this is provided, getchar() calls into this instead. This is used +# for unittesting purposes. 
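+# For example, a test could temporarily override the hook (an illustrative
+# sketch; the lambda is a stand-in that ignores the ``echo`` flag):
+#
+#     import click.termui
+#     click.termui._getchar = lambda echo: "y"  # every getchar() returns "y"
+#     click.termui._getchar = None  # reset; the real implementation is re-imported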
+_getchar: t.Optional[t.Callable[[bool], str]] = None + + +def getchar(echo: bool = False) -> str: + """Fetches a single character from the terminal and returns it. This + will always return a unicode character and under certain rare + circumstances this might return more than one character. The + situations which more than one character is returned is when for + whatever reason multiple characters end up in the terminal buffer or + standard input was not actually a terminal. + + Note that this will always read from the terminal, even if something + is piped into the standard input. + + Note for Windows: in rare cases when typing non-ASCII characters, this + function might wait for a second character and then return both at once. + This is because certain Unicode characters look like special-key markers. + + .. versionadded:: 2.0 + + :param echo: if set to `True`, the character read will also show up on + the terminal. The default is to not show it. + """ + global _getchar + + if _getchar is None: + from ._termui_impl import getchar as f + + _getchar = f + + return _getchar(echo) + + +def raw_terminal() -> t.ContextManager[int]: + from ._termui_impl import raw_terminal as f + + return f() + + +def pause(info: t.Optional[str] = None, err: bool = False) -> None: + """This command stops execution and waits for the user to press any + key to continue. This is similar to the Windows batch "pause" + command. If the program is not run through a terminal, this command + will instead do nothing. + + .. versionadded:: 2.0 + + .. versionadded:: 4.0 + Added the `err` parameter. + + :param info: The message to print before pausing. Defaults to + ``"Press any key to continue..."``. + :param err: if set to message goes to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + if not isatty(sys.stdin) or not isatty(sys.stdout): + return + + if info is None: + info = _("Press any key to continue...") + + try: + if info: + echo(info, nl=False, err=err) + try: + getchar() + except (KeyboardInterrupt, EOFError): + pass + finally: + if info: + echo(err=err) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/testing.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/testing.py new file mode 100644 index 00000000..e395c2ed --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/testing.py @@ -0,0 +1,479 @@ +import contextlib +import io +import os +import shlex +import shutil +import sys +import tempfile +import typing as t +from types import TracebackType + +from . import formatting +from . import termui +from . 
import utils +from ._compat import _find_binary_reader + +if t.TYPE_CHECKING: + from .core import BaseCommand + + +class EchoingStdin: + def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: + self._input = input + self._output = output + self._paused = False + + def __getattr__(self, x: str) -> t.Any: + return getattr(self._input, x) + + def _echo(self, rv: bytes) -> bytes: + if not self._paused: + self._output.write(rv) + + return rv + + def read(self, n: int = -1) -> bytes: + return self._echo(self._input.read(n)) + + def read1(self, n: int = -1) -> bytes: + return self._echo(self._input.read1(n)) # type: ignore + + def readline(self, n: int = -1) -> bytes: + return self._echo(self._input.readline(n)) + + def readlines(self) -> t.List[bytes]: + return [self._echo(x) for x in self._input.readlines()] + + def __iter__(self) -> t.Iterator[bytes]: + return iter(self._echo(x) for x in self._input) + + def __repr__(self) -> str: + return repr(self._input) + + +@contextlib.contextmanager +def _pause_echo(stream: t.Optional[EchoingStdin]) -> t.Iterator[None]: + if stream is None: + yield + else: + stream._paused = True + yield + stream._paused = False + + +class _NamedTextIOWrapper(io.TextIOWrapper): + def __init__( + self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any + ) -> None: + super().__init__(buffer, **kwargs) + self._name = name + self._mode = mode + + @property + def name(self) -> str: + return self._name + + @property + def mode(self) -> str: + return self._mode + + +def make_input_stream( + input: t.Optional[t.Union[str, bytes, t.IO]], charset: str +) -> t.BinaryIO: + # Is already an input stream. + if hasattr(input, "read"): + rv = _find_binary_reader(t.cast(t.IO, input)) + + if rv is not None: + return rv + + raise TypeError("Could not find binary reader for input stream.") + + if input is None: + input = b"" + elif isinstance(input, str): + input = input.encode(charset) + + return io.BytesIO(t.cast(bytes, input)) + + +class Result: + """Holds the captured result of an invoked CLI script.""" + + def __init__( + self, + runner: "CliRunner", + stdout_bytes: bytes, + stderr_bytes: t.Optional[bytes], + return_value: t.Any, + exit_code: int, + exception: t.Optional[BaseException], + exc_info: t.Optional[ + t.Tuple[t.Type[BaseException], BaseException, TracebackType] + ] = None, + ): + #: The runner that created the result + self.runner = runner + #: The standard output as bytes. + self.stdout_bytes = stdout_bytes + #: The standard error as bytes, or None if not available + self.stderr_bytes = stderr_bytes + #: The value returned from the invoked command. + #: + #: .. versionadded:: 8.0 + self.return_value = return_value + #: The exit code as integer. + self.exit_code = exit_code + #: The exception that happened if one did. 
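+        #: ``None`` if the command exited cleanly.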
+        self.exception = exception
+        #: The traceback
+        self.exc_info = exc_info
+
+    @property
+    def output(self) -> str:
+        """The (standard) output as unicode string."""
+        return self.stdout
+
+    @property
+    def stdout(self) -> str:
+        """The standard output as unicode string."""
+        return self.stdout_bytes.decode(self.runner.charset, "replace").replace(
+            "\r\n", "\n"
+        )
+
+    @property
+    def stderr(self) -> str:
+        """The standard error as unicode string."""
+        if self.stderr_bytes is None:
+            raise ValueError("stderr not separately captured")
+        return self.stderr_bytes.decode(self.runner.charset, "replace").replace(
+            "\r\n", "\n"
+        )
+
+    def __repr__(self) -> str:
+        exc_str = repr(self.exception) if self.exception else "okay"
+        return f"<{type(self).__name__} {exc_str}>"
+
+
+class CliRunner:
+    """The CLI runner provides functionality to invoke a Click command line
+    script for unittesting purposes in an isolated environment. This only
+    works in single-threaded systems without any concurrency as it changes the
+    global interpreter state.
+
+    :param charset: the character set for the input and output data.
+    :param env: a dictionary with environment variables for overriding.
+    :param echo_stdin: if this is set to `True`, then reading from stdin writes
+                       to stdout. This is useful for showing examples in
+                       some circumstances. Note that regular prompts
+                       will automatically echo the input.
+    :param mix_stderr: if this is set to `False`, then stdout and stderr are
+                       preserved as independent streams. This is useful for
+                       Unix-philosophy apps that have predictable stdout and
+                       noisy stderr, such that each may be measured
+                       independently.
+    """
+
+    def __init__(
+        self,
+        charset: str = "utf-8",
+        env: t.Optional[t.Mapping[str, t.Optional[str]]] = None,
+        echo_stdin: bool = False,
+        mix_stderr: bool = True,
+    ) -> None:
+        self.charset = charset
+        self.env = env or {}
+        self.echo_stdin = echo_stdin
+        self.mix_stderr = mix_stderr
+
+    def get_default_prog_name(self, cli: "BaseCommand") -> str:
+        """Given a command object it will return the default program name
+        for it. The default is the `name` attribute or ``"root"`` if not
+        set.
+        """
+        return cli.name or "root"
+
+    def make_env(
+        self, overrides: t.Optional[t.Mapping[str, t.Optional[str]]] = None
+    ) -> t.Mapping[str, t.Optional[str]]:
+        """Returns the environment overrides for invoking a script."""
+        rv = dict(self.env)
+        if overrides:
+            rv.update(overrides)
+        return rv
+
+    @contextlib.contextmanager
+    def isolation(
+        self,
+        input: t.Optional[t.Union[str, bytes, t.IO]] = None,
+        env: t.Optional[t.Mapping[str, t.Optional[str]]] = None,
+        color: bool = False,
+    ) -> t.Iterator[t.Tuple[io.BytesIO, t.Optional[io.BytesIO]]]:
+        """A context manager that sets up the isolation for invoking a
+        command line tool. This sets up stdin with the given input data
+        and `os.environ` with the overrides from the given dictionary.
+        This also rebinds some internals in Click to be mocked (like the
+        prompt functionality).
+
+        This is automatically done in the :meth:`invoke` method.
+
+        :param input: the input stream to put into sys.stdin.
+        :param env: the environment overrides as dictionary.
+        :param color: whether the output should contain color codes. The
+                      application can still override this explicitly.
+
+        .. versionchanged:: 8.0
+            ``stderr`` is opened with ``errors="backslashreplace"``
+            instead of the default ``"strict"``.
+
+        .. versionchanged:: 4.0
+            Added the ``color`` parameter.
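+
+        Example (an illustrative sketch, not from the upstream docs; the
+        input value is made up)::
+
+            runner = CliRunner()
+            with runner.isolation(input="hello\n") as (out, err):
+                value = input()  # reads "hello" from the isolated stdin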
+ """ + bytes_input = make_input_stream(input, self.charset) + echo_input = None + + old_stdin = sys.stdin + old_stdout = sys.stdout + old_stderr = sys.stderr + old_forced_width = formatting.FORCED_WIDTH + formatting.FORCED_WIDTH = 80 + + env = self.make_env(env) + + bytes_output = io.BytesIO() + + if self.echo_stdin: + bytes_input = echo_input = t.cast( + t.BinaryIO, EchoingStdin(bytes_input, bytes_output) + ) + + sys.stdin = text_input = _NamedTextIOWrapper( + bytes_input, encoding=self.charset, name="", mode="r" + ) + + if self.echo_stdin: + # Force unbuffered reads, otherwise TextIOWrapper reads a + # large chunk which is echoed early. + text_input._CHUNK_SIZE = 1 # type: ignore + + sys.stdout = _NamedTextIOWrapper( + bytes_output, encoding=self.charset, name="", mode="w" + ) + + bytes_error = None + if self.mix_stderr: + sys.stderr = sys.stdout + else: + bytes_error = io.BytesIO() + sys.stderr = _NamedTextIOWrapper( + bytes_error, + encoding=self.charset, + name="", + mode="w", + errors="backslashreplace", + ) + + @_pause_echo(echo_input) # type: ignore + def visible_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(prompt or "") + val = text_input.readline().rstrip("\r\n") + sys.stdout.write(f"{val}\n") + sys.stdout.flush() + return val + + @_pause_echo(echo_input) # type: ignore + def hidden_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(f"{prompt or ''}\n") + sys.stdout.flush() + return text_input.readline().rstrip("\r\n") + + @_pause_echo(echo_input) # type: ignore + def _getchar(echo: bool) -> str: + char = sys.stdin.read(1) + + if echo: + sys.stdout.write(char) + + sys.stdout.flush() + return char + + default_color = color + + def should_strip_ansi( + stream: t.Optional[t.IO] = None, color: t.Optional[bool] = None + ) -> bool: + if color is None: + return not default_color + return not color + + old_visible_prompt_func = termui.visible_prompt_func + old_hidden_prompt_func = termui.hidden_prompt_func + old__getchar_func = termui._getchar + old_should_strip_ansi = utils.should_strip_ansi # type: ignore + termui.visible_prompt_func = visible_input + termui.hidden_prompt_func = hidden_input + termui._getchar = _getchar + utils.should_strip_ansi = should_strip_ansi # type: ignore + + old_env = {} + try: + for key, value in env.items(): + old_env[key] = os.environ.get(key) + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + yield (bytes_output, bytes_error) + finally: + for key, value in old_env.items(): + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + sys.stdout = old_stdout + sys.stderr = old_stderr + sys.stdin = old_stdin + termui.visible_prompt_func = old_visible_prompt_func + termui.hidden_prompt_func = old_hidden_prompt_func + termui._getchar = old__getchar_func + utils.should_strip_ansi = old_should_strip_ansi # type: ignore + formatting.FORCED_WIDTH = old_forced_width + + def invoke( + self, + cli: "BaseCommand", + args: t.Optional[t.Union[str, t.Sequence[str]]] = None, + input: t.Optional[t.Union[str, bytes, t.IO]] = None, + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + catch_exceptions: bool = True, + color: bool = False, + **extra: t.Any, + ) -> Result: + """Invokes a command in an isolated environment. The arguments are + forwarded directly to the command line script, the `extra` keyword + arguments are passed to the :meth:`~clickpkg.Command.main` function of + the command. 
+ + This returns a :class:`Result` object. + + :param cli: the command to invoke + :param args: the arguments to invoke. It may be given as an iterable + or a string. When given as string it will be interpreted + as a Unix shell command. More details at + :func:`shlex.split`. + :param input: the input data for `sys.stdin`. + :param env: the environment overrides. + :param catch_exceptions: Whether to catch any other exceptions than + ``SystemExit``. + :param extra: the keyword arguments to pass to :meth:`main`. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + The result object has the ``return_value`` attribute with + the value returned from the invoked command. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionchanged:: 3.0 + Added the ``catch_exceptions`` parameter. + + .. versionchanged:: 3.0 + The result object has the ``exc_info`` attribute with the + traceback if available. + """ + exc_info = None + with self.isolation(input=input, env=env, color=color) as outstreams: + return_value = None + exception: t.Optional[BaseException] = None + exit_code = 0 + + if isinstance(args, str): + args = shlex.split(args) + + try: + prog_name = extra.pop("prog_name") + except KeyError: + prog_name = self.get_default_prog_name(cli) + + try: + return_value = cli.main(args=args or (), prog_name=prog_name, **extra) + except SystemExit as e: + exc_info = sys.exc_info() + e_code = t.cast(t.Optional[t.Union[int, t.Any]], e.code) + + if e_code is None: + e_code = 0 + + if e_code != 0: + exception = e + + if not isinstance(e_code, int): + sys.stdout.write(str(e_code)) + sys.stdout.write("\n") + e_code = 1 + + exit_code = e_code + + except Exception as e: + if not catch_exceptions: + raise + exception = e + exit_code = 1 + exc_info = sys.exc_info() + finally: + sys.stdout.flush() + stdout = outstreams[0].getvalue() + if self.mix_stderr: + stderr = None + else: + stderr = outstreams[1].getvalue() # type: ignore + + return Result( + runner=self, + stdout_bytes=stdout, + stderr_bytes=stderr, + return_value=return_value, + exit_code=exit_code, + exception=exception, + exc_info=exc_info, # type: ignore + ) + + @contextlib.contextmanager + def isolated_filesystem( + self, temp_dir: t.Optional[t.Union[str, os.PathLike]] = None + ) -> t.Iterator[str]: + """A context manager that creates a temporary directory and + changes the current working directory to it. This isolates tests + that affect the contents of the CWD to prevent them from + interfering with each other. + + :param temp_dir: Create the temporary directory under this + directory. If given, the created directory is not removed + when exiting. + + .. versionchanged:: 8.0 + Added the ``temp_dir`` parameter. 
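+
+        Example (a minimal sketch; the file name is hypothetical)::
+
+            runner = CliRunner()
+            with runner.isolated_filesystem():
+                with open("hello.txt", "w") as f:
+                    f.write("Hello World!")
+                # The CWD here is a fresh temp dir, removed again on exit.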
+ """ + cwd = os.getcwd() + dt = tempfile.mkdtemp(dir=temp_dir) # type: ignore[type-var] + os.chdir(dt) + + try: + yield t.cast(str, dt) + finally: + os.chdir(cwd) + + if temp_dir is None: + try: + shutil.rmtree(dt) + except OSError: # noqa: B014 + pass diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/types.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/types.py new file mode 100644 index 00000000..b45ee53d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/types.py @@ -0,0 +1,1073 @@ +import os +import stat +import typing as t +from datetime import datetime +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import _get_argv_encoding +from ._compat import get_filesystem_encoding +from ._compat import open_stream +from .exceptions import BadParameter +from .utils import LazyFile +from .utils import safecall + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Context + from .core import Parameter + from .shell_completion import CompletionItem + + +class ParamType: + """Represents the type of a parameter. Validates and converts values + from the command line or Python into the correct type. + + To implement a custom type, subclass and implement at least the + following: + + - The :attr:`name` class attribute must be set. + - Calling an instance of the type with ``None`` must return + ``None``. This is already implemented by default. + - :meth:`convert` must convert string values to the correct type. + - :meth:`convert` must accept values that are already the correct + type. + - It must be able to convert a value if the ``ctx`` and ``param`` + arguments are ``None``. This can occur when converting prompt + input. + """ + + is_composite: t.ClassVar[bool] = False + arity: t.ClassVar[int] = 1 + + #: the descriptive name of this type + name: str + + #: if a list of this type is expected and the value is pulled from a + #: string environment variable, this is what splits it up. `None` + #: means any whitespace. For all parameters the general rule is that + #: whitespace splits them up. The exception are paths and files which + #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on + #: Windows). + envvar_list_splitter: t.ClassVar[t.Optional[str]] = None + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + # The class name without the "ParamType" suffix. + param_type = type(self).__name__.partition("ParamType")[0] + param_type = param_type.partition("ParameterType")[0] + + # Custom subclasses might not remember to set a name. + if hasattr(self, "name"): + name = self.name + else: + name = param_type + + return {"param_type": param_type, "name": name} + + def __call__( + self, + value: t.Any, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> t.Any: + if value is not None: + return self.convert(value, param, ctx) + + def get_metavar(self, param: "Parameter") -> t.Optional[str]: + """Returns the metavar default for this param if it provides one.""" + + def get_missing_message(self, param: "Parameter") -> t.Optional[str]: + """Optionally might return extra information about a missing + parameter. + + .. 
versionadded:: 2.0 + """ + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + """Convert the value to the correct type. This is not called if + the value is ``None`` (the missing value). + + This must accept string values from the command line, as well as + values that are already the correct type. It may also convert + other compatible types. + + The ``param`` and ``ctx`` arguments may be ``None`` in certain + situations, such as when converting prompt input. + + If the value cannot be converted, call :meth:`fail` with a + descriptive message. + + :param value: The value to convert. + :param param: The parameter that is using this type to convert + its value. May be ``None``. + :param ctx: The current context that arrived at this value. May + be ``None``. + """ + return value + + def split_envvar_value(self, rv: str) -> t.Sequence[str]: + """Given a value from an environment variable this splits it up + into small chunks depending on the defined envvar list splitter. + + If the splitter is set to `None`, which means that whitespace splits, + then leading and trailing whitespace is ignored. Otherwise, leading + and trailing splitters usually lead to empty items being included. + """ + return (rv or "").split(self.envvar_list_splitter) + + def fail( + self, + message: str, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> "t.NoReturn": + """Helper method to fail with an invalid value message.""" + raise BadParameter(message, ctx=ctx, param=param) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a list of + :class:`~click.shell_completion.CompletionItem` objects for the + incomplete value. Most types do not provide completions, but + some do, and this allows custom types to provide custom + completions as well. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + return [] + + +class CompositeParamType(ParamType): + is_composite = True + + @property + def arity(self) -> int: # type: ignore + raise NotImplementedError() + + +class FuncParamType(ParamType): + def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: + self.name = func.__name__ + self.func = func + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["func"] = self.func + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self.func(value) + except ValueError: + try: + value = str(value) + except UnicodeError: + value = value.decode("utf-8", "replace") + + self.fail(value, param, ctx) + + +class UnprocessedParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + return value + + def __repr__(self) -> str: + return "UNPROCESSED" + + +class StringParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, bytes): + enc = _get_argv_encoding() + try: + value = value.decode(enc) + except UnicodeError: + fs_enc = get_filesystem_encoding() + if fs_enc != enc: + try: + value = value.decode(fs_enc) + except UnicodeError: + value = value.decode("utf-8", "replace") + else: + value = value.decode("utf-8", "replace") + return value + return str(value) + + def __repr__(self) -> str: + return "STRING" + + +class Choice(ParamType): + """The choice type allows a value to be checked against a fixed set + of supported values. All of these values have to be strings. + + You should only pass a list or tuple of choices. Other iterables + (like generators) may lead to surprising results. + + The resulting value will always be one of the originally passed choices + regardless of ``case_sensitive`` or any ``ctx.token_normalize_func`` + being specified. + + See :ref:`choice-opts` for an example. + + :param case_sensitive: Set to false to make choices case + insensitive. Defaults to true. + """ + + name = "choice" + + def __init__(self, choices: t.Sequence[str], case_sensitive: bool = True) -> None: + self.choices = choices + self.case_sensitive = case_sensitive + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["choices"] = self.choices + info_dict["case_sensitive"] = self.case_sensitive + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + choices_str = "|".join(self.choices) + + # Use curly braces to indicate a required argument. + if param.required and param.param_type_name == "argument": + return f"{{{choices_str}}}" + + # Use square braces to indicate an option or optional argument. 
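+        # e.g. Choice(["red", "green", "blue"]) renders as "[red|green|blue]".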
+ return f"[{choices_str}]" + + def get_missing_message(self, param: "Parameter") -> str: + return _("Choose from:\n\t{choices}").format(choices=",\n\t".join(self.choices)) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + # Match through normalization and case sensitivity + # first do token_normalize_func, then lowercase + # preserve original `value` to produce an accurate message in + # `self.fail` + normed_value = value + normed_choices = {choice: choice for choice in self.choices} + + if ctx is not None and ctx.token_normalize_func is not None: + normed_value = ctx.token_normalize_func(value) + normed_choices = { + ctx.token_normalize_func(normed_choice): original + for normed_choice, original in normed_choices.items() + } + + if not self.case_sensitive: + normed_value = normed_value.casefold() + normed_choices = { + normed_choice.casefold(): original + for normed_choice, original in normed_choices.items() + } + + if normed_value in normed_choices: + return normed_choices[normed_value] + + choices_str = ", ".join(map(repr, self.choices)) + self.fail( + ngettext( + "{value!r} is not {choice}.", + "{value!r} is not one of {choices}.", + len(self.choices), + ).format(value=value, choice=choices_str, choices=choices_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return f"Choice({list(self.choices)})" + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Complete choices that start with the incomplete value. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + str_choices = map(str, self.choices) + + if self.case_sensitive: + matched = (c for c in str_choices if c.startswith(incomplete)) + else: + incomplete = incomplete.lower() + matched = (c for c in str_choices if c.lower().startswith(incomplete)) + + return [CompletionItem(c) for c in matched] + + +class DateTime(ParamType): + """The DateTime type converts date strings into `datetime` objects. + + The format strings which are checked are configurable, but default to some + common (non-timezone aware) ISO 8601 formats. + + When specifying *DateTime* formats, you should only pass a list or a tuple. + Other iterables, like generators, may lead to surprising results. + + The format strings are processed using ``datetime.strptime``, and this + consequently defines the format strings which are allowed. + + Parsing is tried using each format, in order, and the first format which + parses successfully is used. + + :param formats: A list or tuple of date format strings, in the order in + which they should be tried. Defaults to + ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, + ``'%Y-%m-%d %H:%M:%S'``. 
+ """ + + name = "datetime" + + def __init__(self, formats: t.Optional[t.Sequence[str]] = None): + self.formats = formats or ["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["formats"] = self.formats + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + return f"[{'|'.join(self.formats)}]" + + def _try_to_convert_date(self, value: t.Any, format: str) -> t.Optional[datetime]: + try: + return datetime.strptime(value, format) + except ValueError: + return None + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, datetime): + return value + + for format in self.formats: + converted = self._try_to_convert_date(value, format) + + if converted is not None: + return converted + + formats_str = ", ".join(map(repr, self.formats)) + self.fail( + ngettext( + "{value!r} does not match the format {format}.", + "{value!r} does not match the formats {formats}.", + len(self.formats), + ).format(value=value, format=formats_str, formats=formats_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return "DateTime" + + +class _NumberParamTypeBase(ParamType): + _number_class: t.ClassVar[t.Type] + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self._number_class(value) + except ValueError: + self.fail( + _("{value!r} is not a valid {number_type}.").format( + value=value, number_type=self.name + ), + param, + ctx, + ) + + +class _NumberRangeBase(_NumberParamTypeBase): + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + self.min = min + self.max = max + self.min_open = min_open + self.max_open = max_open + self.clamp = clamp + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + min=self.min, + max=self.max, + min_open=self.min_open, + max_open=self.max_open, + clamp=self.clamp, + ) + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import operator + + rv = super().convert(value, param, ctx) + lt_min: bool = self.min is not None and ( + operator.le if self.min_open else operator.lt + )(rv, self.min) + gt_max: bool = self.max is not None and ( + operator.ge if self.max_open else operator.gt + )(rv, self.max) + + if self.clamp: + if lt_min: + return self._clamp(self.min, 1, self.min_open) # type: ignore + + if gt_max: + return self._clamp(self.max, -1, self.max_open) # type: ignore + + if lt_min or gt_max: + self.fail( + _("{value} is not in the range {range}.").format( + value=rv, range=self._describe_range() + ), + param, + ctx, + ) + + return rv + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + """Find the valid value to clamp to bound in the given + direction. + + :param bound: The boundary value. + :param dir: 1 or -1 indicating the direction to move. + :param open: If true, the range does not include the bound. 
+ """ + raise NotImplementedError + + def _describe_range(self) -> str: + """Describe the range for use in help text.""" + if self.min is None: + op = "<" if self.max_open else "<=" + return f"x{op}{self.max}" + + if self.max is None: + op = ">" if self.min_open else ">=" + return f"x{op}{self.min}" + + lop = "<" if self.min_open else "<=" + rop = "<" if self.max_open else "<=" + return f"{self.min}{lop}x{rop}{self.max}" + + def __repr__(self) -> str: + clamp = " clamped" if self.clamp else "" + return f"<{type(self).__name__} {self._describe_range()}{clamp}>" + + +class IntParamType(_NumberParamTypeBase): + name = "integer" + _number_class = int + + def __repr__(self) -> str: + return "INT" + + +class IntRange(_NumberRangeBase, IntParamType): + """Restrict an :data:`click.INT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "integer range" + + def _clamp( # type: ignore + self, bound: int, dir: "te.Literal[1, -1]", open: bool + ) -> int: + if not open: + return bound + + return bound + dir + + +class FloatParamType(_NumberParamTypeBase): + name = "float" + _number_class = float + + def __repr__(self) -> str: + return "FLOAT" + + +class FloatRange(_NumberRangeBase, FloatParamType): + """Restrict a :data:`click.FLOAT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. This is not supported if either + boundary is marked ``open``. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "float range" + + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + super().__init__( + min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp + ) + + if (min_open or max_open) and clamp: + raise TypeError("Clamping is not supported for open bounds.") + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + if not open: + return bound + + # Could use Python 3.9's math.nextafter here, but clamping an + # open float range doesn't seem to be particularly useful. It's + # left up to the user to write a callback to do it if needed. 
+ raise RuntimeError("Clamping is not supported for open bounds.") + + +class BoolParamType(ParamType): + name = "boolean" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if value in {False, True}: + return bool(value) + + norm = value.strip().lower() + + if norm in {"1", "true", "t", "yes", "y", "on"}: + return True + + if norm in {"0", "false", "f", "no", "n", "off"}: + return False + + self.fail( + _("{value!r} is not a valid boolean.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "BOOL" + + +class UUIDParameterType(ParamType): + name = "uuid" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import uuid + + if isinstance(value, uuid.UUID): + return value + + value = value.strip() + + try: + return uuid.UUID(value) + except ValueError: + self.fail( + _("{value!r} is not a valid UUID.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "UUID" + + +class File(ParamType): + """Declares a parameter to be a file for reading or writing. The file + is automatically closed once the context tears down (after the command + finished working). + + Files can be opened for reading or writing. The special value ``-`` + indicates stdin or stdout depending on the mode. + + By default, the file is opened for reading text data, but it can also be + opened in binary mode or for writing. The encoding parameter can be used + to force a specific encoding. + + The `lazy` flag controls if the file should be opened immediately or upon + first IO. The default is to be non-lazy for standard input and output + streams as well as files opened for reading, `lazy` otherwise. When opening a + file lazily for reading, it is still opened temporarily for validation, but + will not be held open until first IO. lazy is mainly useful when opening + for writing to avoid creating the file until it is needed. + + Starting with Click 2.0, files can also be opened atomically in which + case all writes go into a separate file in the same folder and upon + completion the file will be moved over to the original location. This + is useful if a file regularly read by other users is modified. + + See :ref:`file-args` for more information. 
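+
+    Example (a hypothetical command; the option and output are made up)::
+
+        @click.command()
+        @click.option("--out", type=click.File("w", atomic=True))
+        def cli(out):
+            out.write("processed\n")  # written via a temp file, then moved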
+ """ + + name = "filename" + envvar_list_splitter = os.path.pathsep + + def __init__( + self, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: t.Optional[bool] = None, + atomic: bool = False, + ) -> None: + self.mode = mode + self.encoding = encoding + self.errors = errors + self.lazy = lazy + self.atomic = atomic + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update(mode=self.mode, encoding=self.encoding) + return info_dict + + def resolve_lazy_flag(self, value: t.Any) -> bool: + if self.lazy is not None: + return self.lazy + if value == "-": + return False + elif "w" in self.mode: + return True + return False + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + if hasattr(value, "read") or hasattr(value, "write"): + return value + + lazy = self.resolve_lazy_flag(value) + + if lazy: + f: t.IO = t.cast( + t.IO, + LazyFile( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ), + ) + + if ctx is not None: + ctx.call_on_close(f.close_intelligently) # type: ignore + + return f + + f, should_close = open_stream( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + # If a context is provided, we automatically close the file + # at the end of the context execution (or flush out). If a + # context does not exist, it's the caller's responsibility to + # properly close the file. This for instance happens when the + # type is used with prompts. + if ctx is not None: + if should_close: + ctx.call_on_close(safecall(f.close)) + else: + ctx.call_on_close(safecall(f.flush)) + + return f + except OSError as e: # noqa: B014 + self.fail(f"'{os.fsdecode(value)}': {e.strerror}", param, ctx) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide file path completions. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + return [CompletionItem(incomplete, type="file")] + + +class Path(ParamType): + """The ``Path`` type is similar to the :class:`File` type, but + returns the filename instead of an open file. Various checks can be + enabled to validate the type of file and permissions. + + :param exists: The file or directory needs to exist for the value to + be valid. If this is not set to ``True``, and the file does not + exist, then all further checks are silently skipped. + :param file_okay: Allow a file as a value. + :param dir_okay: Allow a directory as a value. + :param readable: if true, a readable check is performed. + :param writable: if true, a writable check is performed. + :param executable: if true, an executable check is performed. + :param resolve_path: Make the value absolute and resolve any + symlinks. A ``~`` is not expanded, as this is supposed to be + done by the shell only. + :param allow_dash: Allow a single dash as a value, which indicates + a standard stream (but does not open it). Use + :func:`~click.open_file` to handle opening this value. + :param path_type: Convert the incoming path value to this type. If + ``None``, keep Python's default, which is ``str``. Useful to + convert to :class:`pathlib.Path`. + + .. 
versionchanged:: 8.1 + Added the ``executable`` parameter. + + .. versionchanged:: 8.0 + Allow passing ``type=pathlib.Path``. + + .. versionchanged:: 6.0 + Added the ``allow_dash`` parameter. + """ + + envvar_list_splitter = os.path.pathsep + + def __init__( + self, + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: t.Optional[t.Type] = None, + executable: bool = False, + ): + self.exists = exists + self.file_okay = file_okay + self.dir_okay = dir_okay + self.readable = readable + self.writable = writable + self.executable = executable + self.resolve_path = resolve_path + self.allow_dash = allow_dash + self.type = path_type + + if self.file_okay and not self.dir_okay: + self.name = _("file") + elif self.dir_okay and not self.file_okay: + self.name = _("directory") + else: + self.name = _("path") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + exists=self.exists, + file_okay=self.file_okay, + dir_okay=self.dir_okay, + writable=self.writable, + readable=self.readable, + allow_dash=self.allow_dash, + ) + return info_dict + + def coerce_path_result(self, rv: t.Any) -> t.Any: + if self.type is not None and not isinstance(rv, self.type): + if self.type is str: + rv = os.fsdecode(rv) + elif self.type is bytes: + rv = os.fsencode(rv) + else: + rv = self.type(rv) + + return rv + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + rv = value + + is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") + + if not is_dash: + if self.resolve_path: + # os.path.realpath doesn't resolve symlinks on Windows + # until Python 3.8. Use pathlib for now. + import pathlib + + rv = os.fsdecode(pathlib.Path(rv).resolve()) + + try: + st = os.stat(rv) + except OSError: + if not self.exists: + return self.coerce_path_result(rv) + self.fail( + _("{name} {filename!r} does not exist.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + + if not self.file_okay and stat.S_ISREG(st.st_mode): + self.fail( + _("{name} {filename!r} is a file.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + if not self.dir_okay and stat.S_ISDIR(st.st_mode): + self.fail( + _("{name} '{filename}' is a directory.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + + if self.readable and not os.access(rv, os.R_OK): + self.fail( + _("{name} {filename!r} is not readable.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + + if self.writable and not os.access(rv, os.W_OK): + self.fail( + _("{name} {filename!r} is not writable.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + + if self.executable and not os.access(value, os.X_OK): + self.fail( + _("{name} {filename!r} is not executable.").format( + name=self.name.title(), filename=os.fsdecode(value) + ), + param, + ctx, + ) + + return self.coerce_path_result(rv) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide path completions for only + directories or any paths. + + :param ctx: Invocation context for this command. 
+ :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + type = "dir" if self.dir_okay and not self.file_okay else "file" + return [CompletionItem(incomplete, type=type)] + + +class Tuple(CompositeParamType): + """The default behavior of Click is to apply a type on a value directly. + This works well in most cases, except for when `nargs` is set to a fixed + count and different types should be used for different items. In this + case the :class:`Tuple` type can be used. This type can only be used + if `nargs` is set to a fixed number. + + For more information see :ref:`tuple-type`. + + This can be selected by using a Python tuple literal as a type. + + :param types: a list of types that should be used for the tuple items. + """ + + def __init__(self, types: t.Sequence[t.Union[t.Type, ParamType]]) -> None: + self.types = [convert_type(ty) for ty in types] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["types"] = [t.to_info_dict() for t in self.types] + return info_dict + + @property + def name(self) -> str: # type: ignore + return f"<{' '.join(ty.name for ty in self.types)}>" + + @property + def arity(self) -> int: # type: ignore + return len(self.types) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + len_type = len(self.types) + len_value = len(value) + + if len_value != len_type: + self.fail( + ngettext( + "{len_type} values are required, but {len_value} was given.", + "{len_type} values are required, but {len_value} were given.", + len_value, + ).format(len_type=len_type, len_value=len_value), + param=param, + ctx=ctx, + ) + + return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) + + +def convert_type(ty: t.Optional[t.Any], default: t.Optional[t.Any] = None) -> ParamType: + """Find the most appropriate :class:`ParamType` for the given Python + type. If the type isn't provided, it can be inferred from a default + value. + """ + guessed_type = False + + if ty is None and default is not None: + if isinstance(default, (tuple, list)): + # If the default is empty, ty will remain None and will + # return STRING. + if default: + item = default[0] + + # A tuple of tuples needs to detect the inner types. + # Can't call convert recursively because that would + # incorrectly unwind the tuple to a single type. + if isinstance(item, (tuple, list)): + ty = tuple(map(type, item)) + else: + ty = type(item) + else: + ty = type(default) + + guessed_type = True + + if isinstance(ty, tuple): + return Tuple(ty) + + if isinstance(ty, ParamType): + return ty + + if ty is str or ty is None: + return STRING + + if ty is int: + return INT + + if ty is float: + return FLOAT + + if ty is bool: + return BOOL + + if guessed_type: + return STRING + + if __debug__: + try: + if issubclass(ty, ParamType): + raise AssertionError( + f"Attempted to use an uninstantiated parameter type ({ty})." + ) + except TypeError: + # ty is an instance (correct), so issubclass fails. + pass + + return FuncParamType(ty) + + +#: A dummy parameter type that just does nothing. From a user's +#: perspective this appears to just be the same as `STRING` but +#: internally no string conversion takes place if the input was bytes. +#: This is usually useful when working with file paths as they can +#: appear in bytes and unicode. 
+#:
+#: For path related uses the :class:`Path` type is a better choice but
+#: there are situations where an unprocessed type is useful which is why
+#: it is provided.
+#:
+#: .. versionadded:: 4.0
+UNPROCESSED = UnprocessedParamType()
+
+#: A unicode string parameter type which is the implicit default. This
+#: can also be selected by using ``str`` as type.
+STRING = StringParamType()
+
+#: An integer parameter. This can also be selected by using ``int`` as
+#: type.
+INT = IntParamType()
+
+#: A floating point value parameter. This can also be selected by using
+#: ``float`` as type.
+FLOAT = FloatParamType()
+
+#: A boolean parameter. This is the default for boolean flags. This can
+#: also be selected by using ``bool`` as a type.
+BOOL = BoolParamType()
+
+#: A UUID parameter.
+UUID = UUIDParameterType()
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/utils.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/utils.py
new file mode 100644
index 00000000..8283788a
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/click/utils.py
@@ -0,0 +1,580 @@
+import os
+import re
+import sys
+import typing as t
+from functools import update_wrapper
+from types import ModuleType
+
+from ._compat import _default_text_stderr
+from ._compat import _default_text_stdout
+from ._compat import _find_binary_writer
+from ._compat import auto_wrap_for_ansi
+from ._compat import binary_streams
+from ._compat import get_filesystem_encoding
+from ._compat import open_stream
+from ._compat import should_strip_ansi
+from ._compat import strip_ansi
+from ._compat import text_streams
+from ._compat import WIN
+from .globals import resolve_color_default
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+
+
+def _posixify(name: str) -> str:
+    return "-".join(name.split()).lower()
+
+
+def safecall(func: F) -> F:
+    """Wraps a function so that it swallows exceptions."""
+
+    def wrapper(*args, **kwargs):  # type: ignore
+        try:
+            return func(*args, **kwargs)
+        except Exception:
+            pass
+
+    return update_wrapper(t.cast(F, wrapper), func)
+
+
+def make_str(value: t.Any) -> str:
+    """Converts a value into a valid string."""
+    if isinstance(value, bytes):
+        try:
+            return value.decode(get_filesystem_encoding())
+        except UnicodeError:
+            return value.decode("utf-8", "replace")
+    return str(value)
+
+
+def make_default_short_help(help: str, max_length: int = 45) -> str:
+    """Returns a condensed version of help string."""
+    # Consider only the first paragraph.
+    paragraph_end = help.find("\n\n")
+
+    if paragraph_end != -1:
+        help = help[:paragraph_end]
+
+    # Collapse newlines, tabs, and spaces.
+    words = help.split()
+
+    if not words:
+        return ""
+
+    # The first paragraph started with a "no rewrap" marker, ignore it.
+    if words[0] == "\b":
+        words = words[1:]
+
+    total_length = 0
+    last_index = len(words) - 1
+
+    for i, word in enumerate(words):
+        total_length += len(word) + (i > 0)
+
+        if total_length > max_length:  # too long, truncate
+            break
+
+        if word[-1] == ".":  # sentence end, truncate without "..."
+            return " ".join(words[: i + 1])
+
+        if total_length == max_length and i != last_index:
+            break  # not at sentence end, truncate with "..."
+    else:
+        return " ".join(words)  # no truncation needed
+
+    # Account for the length of the suffix.
+    total_length += len("...")
+
+    # remove words until the length is short enough
+    while i > 0:
+        total_length -= len(words[i]) + (i > 0)
+
+        if total_length <= max_length:
+            break
+
+        i -= 1
+
+    return " ".join(words[:i]) + "..."
+
+
+class LazyFile:
+    """A lazy file works like a regular file but it does not fully open
+    the file; it does, however, perform some basic checks early to see if
+    the filename parameter makes sense. This is useful for safely opening
+    files for writing.
+    """
+
+    def __init__(
+        self,
+        filename: str,
+        mode: str = "r",
+        encoding: t.Optional[str] = None,
+        errors: t.Optional[str] = "strict",
+        atomic: bool = False,
+    ):
+        self.name = filename
+        self.mode = mode
+        self.encoding = encoding
+        self.errors = errors
+        self.atomic = atomic
+        self._f: t.Optional[t.IO]
+
+        if filename == "-":
+            self._f, self.should_close = open_stream(filename, mode, encoding, errors)
+        else:
+            if "r" in mode:
+                # Open and close the file in case we're opening it for
+                # reading so that we can catch at least some errors in
+                # some cases early.
+                open(filename, mode).close()
+            self._f = None
+            self.should_close = True
+
+    def __getattr__(self, name: str) -> t.Any:
+        return getattr(self.open(), name)
+
+    def __repr__(self) -> str:
+        if self._f is not None:
+            return repr(self._f)
+        return f"<unopened file '{self.name}' {self.mode}>"
+
+    def open(self) -> t.IO:
+        """Opens the file if it's not yet open. This call might fail with
+        a :exc:`FileError`. Not handling this error will produce an error
+        that Click shows.
+        """
+        if self._f is not None:
+            return self._f
+        try:
+            rv, self.should_close = open_stream(
+                self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
+            )
+        except OSError as e:  # noqa: E402
+            from .exceptions import FileError
+
+            raise FileError(self.name, hint=e.strerror) from e
+        self._f = rv
+        return rv
+
+    def close(self) -> None:
+        """Closes the underlying file, no matter what."""
+        if self._f is not None:
+            self._f.close()
+
+    def close_intelligently(self) -> None:
+        """This function only closes the file if it was opened by the lazy
+        file wrapper. For instance this will never close stdin.
+        """
+        if self.should_close:
+            self.close()
+
+    def __enter__(self) -> "LazyFile":
+        return self
+
+    def __exit__(self, exc_type, exc_value, tb):  # type: ignore
+        self.close_intelligently()
+
+    def __iter__(self) -> t.Iterator[t.AnyStr]:
+        self.open()
+        return iter(self._f)  # type: ignore
+
+
+class KeepOpenFile:
+    def __init__(self, file: t.IO) -> None:
+        self._file = file
+
+    def __getattr__(self, name: str) -> t.Any:
+        return getattr(self._file, name)
+
+    def __enter__(self) -> "KeepOpenFile":
+        return self
+
+    def __exit__(self, exc_type, exc_value, tb):  # type: ignore
+        pass
+
+    def __repr__(self) -> str:
+        return repr(self._file)
+
+    def __iter__(self) -> t.Iterator[t.AnyStr]:
+        return iter(self._file)
+
+
+def echo(
+    message: t.Optional[t.Any] = None,
+    file: t.Optional[t.IO[t.Any]] = None,
+    nl: bool = True,
+    err: bool = False,
+    color: t.Optional[bool] = None,
+) -> None:
+    """Print a message and newline to stdout or a file. This should be
+    used instead of :func:`print` because it provides better support
+    for different data, files, and environments.
+
+    Compared to :func:`print`, this does the following:
+
+    - Ensures that the output encoding is not misconfigured on Linux.
+    - Supports Unicode in the Windows console.
+    - Supports writing to binary outputs, and supports writing bytes
+      to text outputs.
+    - Supports colors and styles on Windows.
+ - Removes ANSI color and style codes if the output does not look + like an interactive terminal. + - Always flushes the output. + + :param message: The string or bytes to output. Other objects are + converted to strings. + :param file: The file to write to. Defaults to ``stdout``. + :param err: Write to ``stderr`` instead of ``stdout``. + :param nl: Print a newline after the message. Enabled by default. + :param color: Force showing or hiding colors and other styles. By + default Click will remove color if the output does not look like + an interactive terminal. + + .. versionchanged:: 6.0 + Support Unicode output on the Windows console. Click does not + modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` + will still not support Unicode. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionadded:: 3.0 + Added the ``err`` parameter. + + .. versionchanged:: 2.0 + Support colors on Windows if colorama is installed. + """ + if file is None: + if err: + file = _default_text_stderr() + else: + file = _default_text_stdout() + + # Convert non bytes/text into the native string type. + if message is not None and not isinstance(message, (str, bytes, bytearray)): + out: t.Optional[t.Union[str, bytes]] = str(message) + else: + out = message + + if nl: + out = out or "" + if isinstance(out, str): + out += "\n" + else: + out += b"\n" + + if not out: + file.flush() + return + + # If there is a message and the value looks like bytes, we manually + # need to find the binary stream and write the message in there. + # This is done separately so that most stream types will work as you + # would expect. Eg: you can write to StringIO for other cases. + if isinstance(out, (bytes, bytearray)): + binary_file = _find_binary_writer(file) + + if binary_file is not None: + file.flush() + binary_file.write(out) + binary_file.flush() + return + + # ANSI style code support. For no message or bytes, nothing happens. + # When outputting to a file instead of a terminal, strip codes. + else: + color = resolve_color_default(color) + + if should_strip_ansi(file, color): + out = strip_ansi(out) + elif WIN: + if auto_wrap_for_ansi is not None: + file = auto_wrap_for_ansi(file) # type: ignore + elif not color: + out = strip_ansi(out) + + file.write(out) # type: ignore + file.flush() + + +def get_binary_stream(name: "te.Literal['stdin', 'stdout', 'stderr']") -> t.BinaryIO: + """Returns a system stream for byte processing. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + """ + opener = binary_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener() + + +def get_text_stream( + name: "te.Literal['stdin', 'stdout', 'stderr']", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", +) -> t.TextIO: + """Returns a system stream for text processing. This usually returns + a wrapped stream around a binary stream returned from + :func:`get_binary_stream` but it also can take shortcuts for already + correctly configured streams. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + :param encoding: overrides the detected default encoding. + :param errors: overrides the default error mode. 
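+
+    Example (illustrative)::
+
+        stdout = get_text_stream("stdout")
+        stdout.write("Hello!\n")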
+ """ + opener = text_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener(encoding, errors) + + +def open_file( + filename: str, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: bool = False, + atomic: bool = False, +) -> t.IO: + """Open a file, with extra behavior to handle ``'-'`` to indicate + a standard stream, lazy open on write, and atomic write. Similar to + the behavior of the :class:`~click.File` param type. + + If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is + wrapped so that using it in a context manager will not close it. + This makes it possible to use the function without accidentally + closing a standard stream: + + .. code-block:: python + + with open_file(filename) as f: + ... + + :param filename: The name of the file to open, or ``'-'`` for + ``stdin``/``stdout``. + :param mode: The mode in which to open the file. + :param encoding: The encoding to decode or encode a file opened in + text mode. + :param errors: The error handling mode. + :param lazy: Wait to open the file until it is accessed. For read + mode, the file is temporarily opened to raise access errors + early, then closed until it is read again. + :param atomic: Write to a temporary file and replace the given file + on close. + + .. versionadded:: 3.0 + """ + if lazy: + return t.cast(t.IO, LazyFile(filename, mode, encoding, errors, atomic=atomic)) + + f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) + + if not should_close: + f = t.cast(t.IO, KeepOpenFile(f)) + + return f + + +def format_filename( + filename: t.Union[str, bytes, os.PathLike], shorten: bool = False +) -> str: + """Formats a filename for user display. The main purpose of this + function is to ensure that the filename can be displayed at all. This + will decode the filename to unicode if necessary in a way that it will + not fail. Optionally, it can shorten the filename to not include the + full path to the filename. + + :param filename: formats a filename for UI display. This will also convert + the filename into unicode without failing. + :param shorten: this optionally shortens the filename to strip of the + path that leads up to it. + """ + if shorten: + filename = os.path.basename(filename) + + return os.fsdecode(filename) + + +def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str: + r"""Returns the config folder for the application. The default behavior + is to return whatever is most appropriate for the operating system. + + To give you an idea, for an app called ``"Foo Bar"``, something like + the following folders could be returned: + + Mac OS X: + ``~/Library/Application Support/Foo Bar`` + Mac OS X (POSIX): + ``~/.foo-bar`` + Unix: + ``~/.config/foo-bar`` + Unix (POSIX): + ``~/.foo-bar`` + Windows (roaming): + ``C:\Users\\AppData\Roaming\Foo Bar`` + Windows (not roaming): + ``C:\Users\\AppData\Local\Foo Bar`` + + .. versionadded:: 2.0 + + :param app_name: the application name. This should be properly capitalized + and can contain whitespace. + :param roaming: controls if the folder should be roaming or not on Windows. + Has no affect otherwise. + :param force_posix: if this is set to `True` then on any POSIX system the + folder will be stored in the home folder with a leading + dot instead of the XDG config home or darwin's + application support folder. 
+ """ + if WIN: + key = "APPDATA" if roaming else "LOCALAPPDATA" + folder = os.environ.get(key) + if folder is None: + folder = os.path.expanduser("~") + return os.path.join(folder, app_name) + if force_posix: + return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}")) + if sys.platform == "darwin": + return os.path.join( + os.path.expanduser("~/Library/Application Support"), app_name + ) + return os.path.join( + os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), + _posixify(app_name), + ) + + +class PacifyFlushWrapper: + """This wrapper is used to catch and suppress BrokenPipeErrors resulting + from ``.flush()`` being called on broken pipe during the shutdown/final-GC + of the Python interpreter. Notably ``.flush()`` is always called on + ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any + other cleanup code, and the case where the underlying file is not a broken + pipe, all calls and attributes are proxied. + """ + + def __init__(self, wrapped: t.IO) -> None: + self.wrapped = wrapped + + def flush(self) -> None: + try: + self.wrapped.flush() + except OSError as e: + import errno + + if e.errno != errno.EPIPE: + raise + + def __getattr__(self, attr: str) -> t.Any: + return getattr(self.wrapped, attr) + + +def _detect_program_name( + path: t.Optional[str] = None, _main: t.Optional[ModuleType] = None +) -> str: + """Determine the command used to run the program, for use in help + text. If a file or entry point was executed, the file name is + returned. If ``python -m`` was used to execute a module or package, + ``python -m name`` is returned. + + This doesn't try to be too precise, the goal is to give a concise + name for help text. Files are only shown as their name without the + path. ``python`` is only shown for modules, and the full path to + ``sys.executable`` is not shown. + + :param path: The Python file being executed. Python puts this in + ``sys.argv[0]``, which is used by default. + :param _main: The ``__main__`` module. This should only be passed + during internal testing. + + .. versionadded:: 8.0 + Based on command args detection in the Werkzeug reloader. + + :meta private: + """ + if _main is None: + _main = sys.modules["__main__"] + + if not path: + path = sys.argv[0] + + # The value of __package__ indicates how Python was called. It may + # not exist if a setuptools script is installed as an egg. It may be + # set incorrectly for entry points created with pip on Windows. + if getattr(_main, "__package__", None) is None or ( + os.name == "nt" + and _main.__package__ == "" + and not os.path.exists(path) + and os.path.exists(f"{path}.exe") + ): + # Executed a file, like "python app.py". + return os.path.basename(path) + + # Executed a module, like "python -m example". + # Rewritten by Python from "-m script" to "/path/to/script.py". + # Need to look at main module to determine how it was executed. + py_module = t.cast(str, _main.__package__) + name = os.path.splitext(os.path.basename(path))[0] + + # A submodule like "example.cli". + if name != "__main__": + py_module = f"{py_module}.{name}" + + return f"python -m {py_module.lstrip('.')}" + + +def _expand_args( + args: t.Iterable[str], + *, + user: bool = True, + env: bool = True, + glob_recursive: bool = True, +) -> t.List[str]: + """Simulate Unix shell expansion with Python functions. + + See :func:`glob.glob`, :func:`os.path.expanduser`, and + :func:`os.path.expandvars`. + + This is intended for use on Windows, where the shell does not do any + expansion. 
It may not exactly match what a Unix shell would do.
+
+    :param args: List of command line arguments to expand.
+    :param user: Expand user home directory.
+    :param env: Expand environment variables.
+    :param glob_recursive: ``**`` matches directories recursively.
+
+    .. versionchanged:: 8.1
+        Invalid glob patterns are treated as empty expansions rather
+        than raising an error.
+
+    .. versionadded:: 8.0
+
+    :meta private:
+    """
+    from glob import glob
+
+    out = []
+
+    for arg in args:
+        if user:
+            arg = os.path.expanduser(arg)
+
+        if env:
+            arg = os.path.expandvars(arg)
+
+        try:
+            matches = glob(arg, recursive=glob_recursive)
+        except re.error:
+            matches = []
+
+        if not matches:
+            out.append(arg)
+        else:
+            out.extend(matches)
+
+    return out
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/__init__.py
new file mode 100644
index 00000000..3512d101
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/__init__.py
@@ -0,0 +1,49 @@
+from typing import Any, Optional
+
+from .main import (dotenv_values, find_dotenv, get_key, load_dotenv, set_key,
+                   unset_key)
+
+
+def load_ipython_extension(ipython: Any) -> None:
+    from .ipython import load_ipython_extension
+    load_ipython_extension(ipython)
+
+
+def get_cli_string(
+    path: Optional[str] = None,
+    action: Optional[str] = None,
+    key: Optional[str] = None,
+    value: Optional[str] = None,
+    quote: Optional[str] = None,
+):
+    """Returns a string suitable for running as a shell script.
+
+    Useful for converting arguments passed to a fabric task
+    to be passed to a `local` or `run` command.
+    """
+    command = ['dotenv']
+    if quote:
+        command.append('-q %s' % quote)
+    if path:
+        command.append('-f %s' % path)
+    if action:
+        command.append(action)
+        if key:
+            command.append(key)
+            if value:
+                if ' ' in value:
+                    command.append('"%s"' % value)
+                else:
+                    command.append(value)
+
+    return ' '.join(command).strip()
+
+
+__all__ = ['get_cli_string',
+           'load_dotenv',
+           'dotenv_values',
+           'get_key',
+           'set_key',
+           'unset_key',
+           'find_dotenv',
+           'load_ipython_extension']
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/cli.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/cli.py
new file mode 100644
index 00000000..b7ae24af
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/cli.py
@@ -0,0 +1,164 @@
+import os
+import sys
+from subprocess import Popen
+from typing import Any, Dict, List
+
+try:
+    import click
+except ImportError:
+    sys.stderr.write('It seems python-dotenv is not installed with cli option. \n'
+                     'Run pip install "python-dotenv[cli]" to fix this.')
+    sys.exit(1)
+
+from .main import dotenv_values, get_key, set_key, unset_key
+from .version import __version__
+
+
+@click.group()
+@click.option('-f', '--file', default=os.path.join(os.getcwd(), '.env'),
+              type=click.Path(file_okay=True),
+              help="Location of the .env file, defaults to .env file in current working directory.")
+@click.option('-q', '--quote', default='always',
+              type=click.Choice(['always', 'never', 'auto']),
+              help="Whether or not to quote the variable values. Default mode is always. "
+                   "This does not affect parsing.")
+@click.option('-e', '--export', default=False,
+              type=click.BOOL,
+              help="Whether to write the .env file as an executable bash script.")
+@click.version_option(version=__version__)
+@click.pass_context
+def cli(ctx: click.Context, file: Any, quote: Any, export: Any) -> None:
+    '''This script is used to set, get or unset values from a .env file.'''
+    ctx.obj = {}
+    ctx.obj['QUOTE'] = quote
+    ctx.obj['EXPORT'] = export
+    ctx.obj['FILE'] = file
+
+
+@cli.command()
+@click.pass_context
+def list(ctx: click.Context) -> None:
+    '''Display all the stored key/value pairs.'''
+    file = ctx.obj['FILE']
+    if not os.path.isfile(file):
+        raise click.BadParameter(
+            'Path "%s" does not exist.' % (file),
+            ctx=ctx
+        )
+    dotenv_as_dict = dotenv_values(file)
+    for k, v in dotenv_as_dict.items():
+        click.echo('%s=%s' % (k, v))
+
+
+@cli.command()
+@click.pass_context
+@click.argument('key', required=True)
+@click.argument('value', required=True)
+def set(ctx: click.Context, key: Any, value: Any) -> None:
+    '''Store the given key/value.'''
+    file = ctx.obj['FILE']
+    quote = ctx.obj['QUOTE']
+    export = ctx.obj['EXPORT']
+    success, key, value = set_key(file, key, value, quote, export)
+    if success:
+        click.echo('%s=%s' % (key, value))
+    else:
+        exit(1)
+
+
+@cli.command()
+@click.pass_context
+@click.argument('key', required=True)
+def get(ctx: click.Context, key: Any) -> None:
+    '''Retrieve the value for the given key.'''
+    file = ctx.obj['FILE']
+    if not os.path.isfile(file):
+        raise click.BadParameter(
+            'Path "%s" does not exist.' % (file),
+            ctx=ctx
+        )
+    stored_value = get_key(file, key)
+    if stored_value:
+        click.echo(stored_value)
+    else:
+        exit(1)
+
+
+@cli.command()
+@click.pass_context
+@click.argument('key', required=True)
+def unset(ctx: click.Context, key: Any) -> None:
+    '''Removes the given key.'''
+    file = ctx.obj['FILE']
+    quote = ctx.obj['QUOTE']
+    success, key = unset_key(file, key, quote)
+    if success:
+        click.echo("Successfully removed %s" % key)
+    else:
+        exit(1)
+
+
+@cli.command(context_settings={'ignore_unknown_options': True})
+@click.pass_context
+@click.option(
+    "--override/--no-override",
+    default=True,
+    help="Override variables from the environment with those from the .env file.",
+)
+@click.argument('commandline', nargs=-1, type=click.UNPROCESSED)
+def run(ctx: click.Context, override: bool, commandline: List[str]) -> None:
+    """Run command with environment variables present."""
+    file = ctx.obj['FILE']
+    if not os.path.isfile(file):
+        raise click.BadParameter(
+            'Invalid value for \'-f\': "%s" does not exist.' % (file),
+            ctx=ctx
+        )
+    dotenv_as_dict = {
+        k: v
+        for (k, v) in dotenv_values(file).items()
+        if v is not None and (override or k not in os.environ)
+    }
+
+    if not commandline:
+        click.echo('No command given.')
+        exit(1)
+    ret = run_command(commandline, dotenv_as_dict)
+    exit(ret)
+
+
+def run_command(command: List[str], env: Dict[str, str]) -> int:
+    """Run command in sub process.
+
+    Runs the command in a sub process with the variables from `env`
+    added to the current environment variables.
+
+    Parameters
+    ----------
+    command: List[str]
+        The command and its parameters
+    env: Dict
+        The additional environment variables
+
+    Returns
+    -------
+    int
+        The return code of the command
+
+    """
+    # copy the current environment variables and add the values from
+    # `env`
+    cmd_env = os.environ.copy()
+    cmd_env.update(env)
+
+    p = Popen(command,
+              universal_newlines=True,
+              bufsize=0,
+              shell=False,
+              env=cmd_env)
+    _, _ = p.communicate()
+
+    return p.returncode
+
+
+if __name__ == "__main__":
+    cli()
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/ipython.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/ipython.py
new file mode 100644
index 00000000..7df727cd
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/ipython.py
@@ -0,0 +1,39 @@
+from IPython.core.magic import Magics, line_magic, magics_class  # type: ignore
+from IPython.core.magic_arguments import (argument, magic_arguments,  # type: ignore
+                                          parse_argstring)  # type: ignore
+
+from .main import find_dotenv, load_dotenv
+
+
+@magics_class
+class IPythonDotEnv(Magics):
+
+    @magic_arguments()
+    @argument(
+        '-o', '--override', action='store_true',
+        help="Indicate to override existing variables"
+    )
+    @argument(
+        '-v', '--verbose', action='store_true',
+        help="Indicate function calls to be verbose"
+    )
+    @argument('dotenv_path', nargs='?', type=str, default='.env',
+              help='Search in increasingly higher folders for the `dotenv_path`')
+    @line_magic
+    def dotenv(self, line):
+        args = parse_argstring(self.dotenv, line)
+        # Locate the .env file
+        dotenv_path = args.dotenv_path
+        try:
+            dotenv_path = find_dotenv(dotenv_path, True, True)
+        except IOError:
+            print("cannot find .env file")
+            return
+
+        # Load the .env file
+        load_dotenv(dotenv_path, verbose=args.verbose, override=args.override)
+
+
+def load_ipython_extension(ipython):
+    """Register the %dotenv magic."""
+    ipython.register_magics(IPythonDotEnv)
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/main.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/main.py
new file mode 100644
index 00000000..20ac61ba
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/main.py
@@ -0,0 +1,373 @@
+import io
+import logging
+import os
+import shutil
+import sys
+import tempfile
+from collections import OrderedDict
+from contextlib import contextmanager
+from typing import (IO, Dict, Iterable, Iterator, Mapping, Optional, Tuple,
+                    Union)
+
+from .parser import Binding, parse_stream
+from .variables import parse_variables
+
+logger = logging.getLogger(__name__)
+
+if sys.version_info >= (3, 6):
+    _PathLike = os.PathLike
+else:
+    _PathLike = str
+
+
+def with_warn_for_invalid_lines(mappings: Iterator[Binding]) -> Iterator[Binding]:
+    for mapping in mappings:
+        if mapping.error:
+            logger.warning(
+                "Python-dotenv could not parse statement starting at line %s",
+                mapping.original.line,
+            )
+        yield mapping
+
+
+class DotEnv():
+    def __init__(
+        self,
+        dotenv_path: Optional[Union[str, _PathLike]],
+        stream: Optional[IO[str]] = None,
+        verbose: bool = False,
+        encoding: Union[None, str] = None,
+        interpolate: bool = True,
+        override: bool = True,
+    ) -> None:
+        self.dotenv_path = dotenv_path  # type: Optional[Union[str, _PathLike]]
+        self.stream = stream  # type: Optional[IO[str]]
+        self._dict = None  # type: Optional[Dict[str, Optional[str]]]
+        self.verbose = verbose  # type: bool
+        self.encoding = encoding  # type: Union[None, str]
+        self.interpolate = interpolate  # type: bool
+        self.override = override  # type: bool
+
+    @contextmanager
+    def _get_stream(self) -> Iterator[IO[str]]:
+        if self.dotenv_path and os.path.isfile(self.dotenv_path):
+            with io.open(self.dotenv_path, encoding=self.encoding) as stream:
+                yield stream
+        elif self.stream is not None:
+            yield self.stream
+        else:
+            if self.verbose:
+                logger.info(
+                    "Python-dotenv could not find configuration file %s.",
+                    self.dotenv_path or '.env',
+                )
+            yield io.StringIO('')
+
+    def dict(self) -> Dict[str, Optional[str]]:
+        """Return dotenv as dict"""
+        if self._dict:
+            return self._dict
+
+        raw_values = self.parse()
+
+        if self.interpolate:
+            self._dict = OrderedDict(resolve_variables(raw_values, override=self.override))
+        else:
+            self._dict = OrderedDict(raw_values)
+
+        return self._dict
+
+    def parse(self) -> Iterator[Tuple[str, Optional[str]]]:
+        with self._get_stream() as stream:
+            for mapping in with_warn_for_invalid_lines(parse_stream(stream)):
+                if mapping.key is not None:
+                    yield mapping.key, mapping.value
+
+    def set_as_environment_variables(self) -> bool:
+        """
+        Load the current dotenv as system environment variables.
+        """
+        for k, v in self.dict().items():
+            if k in os.environ and not self.override:
+                continue
+            if v is not None:
+                os.environ[k] = v
+
+        return True
+
+    def get(self, key: str) -> Optional[str]:
+        """
+        Return the value for the given key, or None if the key is not set.
+        """
+        data = self.dict()
+
+        if key in data:
+            return data[key]
+
+        if self.verbose:
+            logger.warning("Key %s not found in %s.", key, self.dotenv_path)
+
+        return None
+
+
+def get_key(
+    dotenv_path: Union[str, _PathLike],
+    key_to_get: str,
+    encoding: Optional[str] = "utf-8",
+) -> Optional[str]:
+    """
+    Get the value of a given key from the given .env.
+
+    Returns `None` if the key isn't found or doesn't have a value.
+ """ + return DotEnv(dotenv_path, verbose=True, encoding=encoding).get(key_to_get) + + +@contextmanager +def rewrite( + path: Union[str, _PathLike], + encoding: Optional[str], +) -> Iterator[Tuple[IO[str], IO[str]]]: + try: + if not os.path.isfile(path): + with io.open(path, "w+", encoding=encoding) as source: + source.write("") + with tempfile.NamedTemporaryFile(mode="w+", delete=False, encoding=encoding) as dest: + with io.open(path, encoding=encoding) as source: + yield (source, dest) # type: ignore + except BaseException: + if os.path.isfile(dest.name): + os.unlink(dest.name) + raise + else: + shutil.move(dest.name, path) + + +def set_key( + dotenv_path: Union[str, _PathLike], + key_to_set: str, + value_to_set: str, + quote_mode: str = "always", + export: bool = False, + encoding: Optional[str] = "utf-8", +) -> Tuple[Optional[bool], str, str]: + """ + Adds or Updates a key/value to the given .env + + If the .env path given doesn't exist, fails instead of risking creating + an orphan .env somewhere in the filesystem + """ + if quote_mode not in ("always", "auto", "never"): + raise ValueError("Unknown quote_mode: {}".format(quote_mode)) + + quote = ( + quote_mode == "always" + or (quote_mode == "auto" and not value_to_set.isalnum()) + ) + + if quote: + value_out = "'{}'".format(value_to_set.replace("'", "\\'")) + else: + value_out = value_to_set + if export: + line_out = 'export {}={}\n'.format(key_to_set, value_out) + else: + line_out = "{}={}\n".format(key_to_set, value_out) + + with rewrite(dotenv_path, encoding=encoding) as (source, dest): + replaced = False + missing_newline = False + for mapping in with_warn_for_invalid_lines(parse_stream(source)): + if mapping.key == key_to_set: + dest.write(line_out) + replaced = True + else: + dest.write(mapping.original.string) + missing_newline = not mapping.original.string.endswith("\n") + if not replaced: + if missing_newline: + dest.write("\n") + dest.write(line_out) + + return True, key_to_set, value_to_set + + +def unset_key( + dotenv_path: Union[str, _PathLike], + key_to_unset: str, + quote_mode: str = "always", + encoding: Optional[str] = "utf-8", +) -> Tuple[Optional[bool], str]: + """ + Removes a given key from the given .env + + If the .env path given doesn't exist, fails + If the given key doesn't exist in the .env, fails + """ + if not os.path.exists(dotenv_path): + logger.warning("Can't delete from %s - it doesn't exist.", dotenv_path) + return None, key_to_unset + + removed = False + with rewrite(dotenv_path, encoding=encoding) as (source, dest): + for mapping in with_warn_for_invalid_lines(parse_stream(source)): + if mapping.key == key_to_unset: + removed = True + else: + dest.write(mapping.original.string) + + if not removed: + logger.warning("Key %s not removed from %s - key doesn't exist.", key_to_unset, dotenv_path) + return None, key_to_unset + + return removed, key_to_unset + + +def resolve_variables( + values: Iterable[Tuple[str, Optional[str]]], + override: bool, +) -> Mapping[str, Optional[str]]: + new_values = {} # type: Dict[str, Optional[str]] + + for (name, value) in values: + if value is None: + result = None + else: + atoms = parse_variables(value) + env = {} # type: Dict[str, Optional[str]] + if override: + env.update(os.environ) # type: ignore + env.update(new_values) + else: + env.update(new_values) + env.update(os.environ) # type: ignore + result = "".join(atom.resolve(env) for atom in atoms) + + new_values[name] = result + + return new_values + + +def _walk_to_root(path: str) -> Iterator[str]: + """ + Yield 
+    """
+    if not os.path.exists(path):
+        raise IOError('Starting path not found')
+
+    if os.path.isfile(path):
+        path = os.path.dirname(path)
+
+    last_dir = None
+    current_dir = os.path.abspath(path)
+    while last_dir != current_dir:
+        yield current_dir
+        parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir))
+        last_dir, current_dir = current_dir, parent_dir
+
+
+def find_dotenv(
+    filename: str = '.env',
+    raise_error_if_not_found: bool = False,
+    usecwd: bool = False,
+) -> str:
+    """
+    Search in increasingly higher folders for the given file.
+
+    Returns the path to the file if found, or an empty string otherwise.
+    """
+
+    def _is_interactive():
+        """ Decide whether this is running in a REPL or IPython notebook """
+        main = __import__('__main__', None, None, fromlist=['__file__'])
+        return not hasattr(main, '__file__')
+
+    if usecwd or _is_interactive() or getattr(sys, 'frozen', False):
+        # Should work without __file__, e.g. in REPL or IPython notebook.
+        path = os.getcwd()
+    else:
+        # will work for .py files
+        frame = sys._getframe()
+        current_file = __file__
+
+        while frame.f_code.co_filename == current_file:
+            assert frame.f_back is not None
+            frame = frame.f_back
+        frame_filename = frame.f_code.co_filename
+        path = os.path.dirname(os.path.abspath(frame_filename))
+
+    for dirname in _walk_to_root(path):
+        check_path = os.path.join(dirname, filename)
+        if os.path.isfile(check_path):
+            return check_path
+
+    if raise_error_if_not_found:
+        raise IOError('File not found')
+
+    return ''
+
+
+def load_dotenv(
+    dotenv_path: Union[str, _PathLike, None] = None,
+    stream: Optional[IO[str]] = None,
+    verbose: bool = False,
+    override: bool = False,
+    interpolate: bool = True,
+    encoding: Optional[str] = "utf-8",
+) -> bool:
+    """Parse a .env file and then load all the variables found as environment variables.
+
+    - *dotenv_path*: absolute or relative path to the .env file.
+    - *stream*: Text stream (such as `io.StringIO`) with .env content, used if
+      `dotenv_path` is `None`.
+    - *verbose*: whether to output a warning if the .env file is missing. Defaults to
+      `False`.
+    - *override*: whether to override the system environment variables with the variables
+      from the `.env` file. Defaults to `False`.
+    - *encoding*: encoding to be used to read the file.
+
+    If both `dotenv_path` and `stream` are `None`, `find_dotenv()` is used to find the
+    .env file.
+    """
+    if dotenv_path is None and stream is None:
+        dotenv_path = find_dotenv()
+
+    dotenv = DotEnv(
+        dotenv_path=dotenv_path,
+        stream=stream,
+        verbose=verbose,
+        interpolate=interpolate,
+        override=override,
+        encoding=encoding,
+    )
+    return dotenv.set_as_environment_variables()
+
+
+def dotenv_values(
+    dotenv_path: Union[str, _PathLike, None] = None,
+    stream: Optional[IO[str]] = None,
+    verbose: bool = False,
+    interpolate: bool = True,
+    encoding: Optional[str] = "utf-8",
+) -> Dict[str, Optional[str]]:
+    """
+    Parse a .env file and return its content as a dict.
+
+    - *dotenv_path*: absolute or relative path to the .env file.
+    - *stream*: `StringIO` object with .env content, used if `dotenv_path` is `None`.
+    - *verbose*: whether to output a warning if the .env file is missing. Defaults to
+      `False`.
+    - *encoding*: encoding to be used to read the file.
+
+    If both `dotenv_path` and `stream` are `None`, `find_dotenv()` is used to find the
+    .env file.
+ """ + if dotenv_path is None and stream is None: + dotenv_path = find_dotenv() + + return DotEnv( + dotenv_path=dotenv_path, + stream=stream, + verbose=verbose, + interpolate=interpolate, + override=True, + encoding=encoding, + ).dict() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/parser.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/parser.py new file mode 100644 index 00000000..398bd49a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/parser.py @@ -0,0 +1,182 @@ +import codecs +import re +from typing import (IO, Iterator, Match, NamedTuple, Optional, # noqa:F401 + Pattern, Sequence, Tuple) + + +def make_regex(string: str, extra_flags: int = 0) -> Pattern[str]: + return re.compile(string, re.UNICODE | extra_flags) + + +_newline = make_regex(r"(\r\n|\n|\r)") +_multiline_whitespace = make_regex(r"\s*", extra_flags=re.MULTILINE) +_whitespace = make_regex(r"[^\S\r\n]*") +_export = make_regex(r"(?:export[^\S\r\n]+)?") +_single_quoted_key = make_regex(r"'([^']+)'") +_unquoted_key = make_regex(r"([^=\#\s]+)") +_equal_sign = make_regex(r"(=[^\S\r\n]*)") +_single_quoted_value = make_regex(r"'((?:\\'|[^'])*)'") +_double_quoted_value = make_regex(r'"((?:\\"|[^"])*)"') +_unquoted_value = make_regex(r"([^\r\n]*)") +_comment = make_regex(r"(?:[^\S\r\n]*#[^\r\n]*)?") +_end_of_line = make_regex(r"[^\S\r\n]*(?:\r\n|\n|\r|$)") +_rest_of_line = make_regex(r"[^\r\n]*(?:\r|\n|\r\n)?") +_double_quote_escapes = make_regex(r"\\[\\'\"abfnrtv]") +_single_quote_escapes = make_regex(r"\\[\\']") + + +Original = NamedTuple( + "Original", + [ + ("string", str), + ("line", int), + ], +) + +Binding = NamedTuple( + "Binding", + [ + ("key", Optional[str]), + ("value", Optional[str]), + ("original", Original), + ("error", bool), + ], +) + + +class Position: + def __init__(self, chars: int, line: int) -> None: + self.chars = chars + self.line = line + + @classmethod + def start(cls) -> "Position": + return cls(chars=0, line=1) + + def set(self, other: "Position") -> None: + self.chars = other.chars + self.line = other.line + + def advance(self, string: str) -> None: + self.chars += len(string) + self.line += len(re.findall(_newline, string)) + + +class Error(Exception): + pass + + +class Reader: + def __init__(self, stream: IO[str]) -> None: + self.string = stream.read() + self.position = Position.start() + self.mark = Position.start() + + def has_next(self) -> bool: + return self.position.chars < len(self.string) + + def set_mark(self) -> None: + self.mark.set(self.position) + + def get_marked(self) -> Original: + return Original( + string=self.string[self.mark.chars:self.position.chars], + line=self.mark.line, + ) + + def peek(self, count: int) -> str: + return self.string[self.position.chars:self.position.chars + count] + + def read(self, count: int) -> str: + result = self.string[self.position.chars:self.position.chars + count] + if len(result) < count: + raise Error("read: End of string") + self.position.advance(result) + return result + + def read_regex(self, regex: Pattern[str]) -> Sequence[str]: + match = regex.match(self.string, self.position.chars) + if match is None: + raise Error("read_regex: Pattern not found") + self.position.advance(self.string[match.start():match.end()]) + return match.groups() + + +def decode_escapes(regex: Pattern[str], string: str) -> str: + def decode_match(match: Match[str]) -> str: + return codecs.decode(match.group(0), 'unicode-escape') # type: ignore + + return regex.sub(decode_match, string) 
+
+
+def parse_key(reader: Reader) -> Optional[str]:
+    char = reader.peek(1)
+    if char == "#":
+        return None
+    elif char == "'":
+        (key,) = reader.read_regex(_single_quoted_key)
+    else:
+        (key,) = reader.read_regex(_unquoted_key)
+    return key
+
+
+def parse_unquoted_value(reader: Reader) -> str:
+    (part,) = reader.read_regex(_unquoted_value)
+    return re.sub(r"\s+#.*", "", part).rstrip()
+
+
+def parse_value(reader: Reader) -> str:
+    char = reader.peek(1)
+    if char == u"'":
+        (value,) = reader.read_regex(_single_quoted_value)
+        return decode_escapes(_single_quote_escapes, value)
+    elif char == u'"':
+        (value,) = reader.read_regex(_double_quoted_value)
+        return decode_escapes(_double_quote_escapes, value)
+    elif char in (u"", u"\n", u"\r"):
+        return u""
+    else:
+        return parse_unquoted_value(reader)
+
+
+def parse_binding(reader: Reader) -> Binding:
+    reader.set_mark()
+    try:
+        reader.read_regex(_multiline_whitespace)
+        if not reader.has_next():
+            return Binding(
+                key=None,
+                value=None,
+                original=reader.get_marked(),
+                error=False,
+            )
+        reader.read_regex(_export)
+        key = parse_key(reader)
+        reader.read_regex(_whitespace)
+        if reader.peek(1) == "=":
+            reader.read_regex(_equal_sign)
+            value = parse_value(reader)  # type: Optional[str]
+        else:
+            value = None
+        reader.read_regex(_comment)
+        reader.read_regex(_end_of_line)
+        return Binding(
+            key=key,
+            value=value,
+            original=reader.get_marked(),
+            error=False,
+        )
+    except Error:
+        reader.read_regex(_rest_of_line)
+        return Binding(
+            key=None,
+            value=None,
+            original=reader.get_marked(),
+            error=True,
+        )
+
+
+def parse_stream(stream: IO[str]) -> Iterator[Binding]:
+    reader = Reader(stream)
+    while reader.has_next():
+        yield parse_binding(reader)
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/py.typed b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/py.typed
new file mode 100644
index 00000000..7632ecf7
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/variables.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/variables.py
new file mode 100644
index 00000000..d77b700c
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/variables.py
@@ -0,0 +1,88 @@
+import re
+from abc import ABCMeta
+from typing import Iterator, Mapping, Optional, Pattern
+
+_posix_variable = re.compile(
+    r"""
+    \$\{
+        (?P<name>[^\}:]*)
+        (?::-
+            (?P<default>[^\}]*)
+        )?
+ \} + """, + re.VERBOSE, +) # type: Pattern[str] + + +class Atom(): + __metaclass__ = ABCMeta + + def __ne__(self, other: object) -> bool: + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + return not result + + def resolve(self, env: Mapping[str, Optional[str]]) -> str: + raise NotImplementedError + + +class Literal(Atom): + def __init__(self, value: str) -> None: + self.value = value + + def __repr__(self) -> str: + return "Literal(value={})".format(self.value) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return NotImplemented + return self.value == other.value + + def __hash__(self) -> int: + return hash((self.__class__, self.value)) + + def resolve(self, env: Mapping[str, Optional[str]]) -> str: + return self.value + + +class Variable(Atom): + def __init__(self, name: str, default: Optional[str]) -> None: + self.name = name + self.default = default + + def __repr__(self) -> str: + return "Variable(name={}, default={})".format(self.name, self.default) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return NotImplemented + return (self.name, self.default) == (other.name, other.default) + + def __hash__(self) -> int: + return hash((self.__class__, self.name, self.default)) + + def resolve(self, env: Mapping[str, Optional[str]]) -> str: + default = self.default if self.default is not None else "" + result = env.get(self.name, default) + return result if result is not None else "" + + +def parse_variables(value: str) -> Iterator[Atom]: + cursor = 0 + + for match in _posix_variable.finditer(value): + (start, end) = match.span() + name = match.groupdict()["name"] + default = match.groupdict()["default"] + + if start > cursor: + yield Literal(value=value[cursor:start]) + + yield Variable(name=name, default=default) + cursor = end + + length = len(value) + if cursor < length: + yield Literal(value=value[cursor:length]) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/version.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/version.py new file mode 100644 index 00000000..5f4bb0b3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/dotenv/version.py @@ -0,0 +1 @@ +__version__ = "0.20.0" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/LICENSE b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/LICENSE new file mode 100644 index 00000000..3e92463e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Sebastián Ramírez + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the 
following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/METADATA new file mode 100644 index 00000000..482fd434 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/METADATA @@ -0,0 +1,551 @@ +Metadata-Version: 2.1 +Name: fastapi +Version: 0.81.0 +Summary: FastAPI framework, high performance, easy to learn, fast to code, ready for production +Home-page: https://github.com/tiangolo/fastapi +Author: Sebastián Ramírez +Author-email: tiangolo@gmail.com +Requires-Python: >=3.6.1 +Description-Content-Type: text/markdown +Classifier: Intended Audience :: Information Technology +Classifier: Intended Audience :: System Administrators +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development +Classifier: Typing :: Typed +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Framework :: AsyncIO +Classifier: Framework :: FastAPI +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Dist: starlette==0.19.1 +Requires-Dist: pydantic >=1.6.2,!=1.7,!=1.7.1,!=1.7.2,!=1.7.3,!=1.8,!=1.8.1,<2.0.0 +Requires-Dist: requests >=2.24.0,<3.0.0 ; extra == "all" +Requires-Dist: jinja2 >=2.11.2,<4.0.0 ; extra == "all" +Requires-Dist: python-multipart >=0.0.5,<0.0.6 ; extra == "all" +Requires-Dist: itsdangerous >=1.1.0,<3.0.0 ; extra == "all" +Requires-Dist: pyyaml >=5.3.1,<7.0.0 ; extra == "all" +Requires-Dist: ujson >=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0 ; extra == "all" +Requires-Dist: orjson >=3.2.1,<4.0.0 ; extra == "all" +Requires-Dist: email_validator >=1.1.1,<2.0.0 ; extra == "all" +Requires-Dist: uvicorn[standard] >=0.12.0,<0.18.0 ; extra == "all" +Requires-Dist: python-jose[cryptography] >=3.3.0,<4.0.0 ; extra == "dev" +Requires-Dist: passlib[bcrypt] >=1.7.2,<2.0.0 ; extra == "dev" +Requires-Dist: autoflake >=1.4.0,<2.0.0 ; extra == "dev" +Requires-Dist: flake8 >=3.8.3,<6.0.0 ; extra == "dev" +Requires-Dist: uvicorn[standard] 
>=0.12.0,<0.18.0 ; extra == "dev" +Requires-Dist: pre-commit >=2.17.0,<3.0.0 ; extra == "dev" +Requires-Dist: mkdocs >=1.1.2,<2.0.0 ; extra == "doc" +Requires-Dist: mkdocs-material >=8.1.4,<9.0.0 ; extra == "doc" +Requires-Dist: mdx-include >=1.4.1,<2.0.0 ; extra == "doc" +Requires-Dist: mkdocs-markdownextradata-plugin >=0.1.7,<0.3.0 ; extra == "doc" +Requires-Dist: typer >=0.4.1,<0.5.0 ; extra == "doc" +Requires-Dist: pyyaml >=5.3.1,<7.0.0 ; extra == "doc" +Requires-Dist: pytest >=6.2.4,<7.0.0 ; extra == "test" +Requires-Dist: pytest-cov >=2.12.0,<4.0.0 ; extra == "test" +Requires-Dist: mypy ==0.910 ; extra == "test" +Requires-Dist: flake8 >=3.8.3,<6.0.0 ; extra == "test" +Requires-Dist: black == 22.3.0 ; extra == "test" +Requires-Dist: isort >=5.0.6,<6.0.0 ; extra == "test" +Requires-Dist: requests >=2.24.0,<3.0.0 ; extra == "test" +Requires-Dist: httpx >=0.14.0,<0.19.0 ; extra == "test" +Requires-Dist: email_validator >=1.1.1,<2.0.0 ; extra == "test" +Requires-Dist: sqlalchemy >=1.3.18,<1.5.0 ; extra == "test" +Requires-Dist: peewee >=3.13.3,<4.0.0 ; extra == "test" +Requires-Dist: databases[sqlite] >=0.3.2,<0.6.0 ; extra == "test" +Requires-Dist: orjson >=3.2.1,<4.0.0 ; extra == "test" +Requires-Dist: ujson >=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0 ; extra == "test" +Requires-Dist: python-multipart >=0.0.5,<0.0.6 ; extra == "test" +Requires-Dist: flask >=1.1.2,<3.0.0 ; extra == "test" +Requires-Dist: anyio[trio] >=3.2.1,<4.0.0 ; extra == "test" +Requires-Dist: types-ujson ==4.2.1 ; extra == "test" +Requires-Dist: types-orjson ==3.6.2 ; extra == "test" +Requires-Dist: types-dataclasses ==0.6.5 ; extra == "test" and ( python_version<'3.7') +Project-URL: Documentation, https://fastapi.tiangolo.com/ +Provides-Extra: all +Provides-Extra: dev +Provides-Extra: doc +Provides-Extra: test + +

+# FastAPI
+
+[FastAPI logo]
+
+*FastAPI framework, high performance, easy to learn, fast to code, ready for production*
+
+[Badges: Test | Coverage | Package version | Supported Python versions]
+
+
+---
+
+**Documentation**: https://fastapi.tiangolo.com
+
+**Source Code**: https://github.com/tiangolo/fastapi
+
+---
+
+FastAPI is a modern, fast (high-performance) web framework for building APIs with Python 3.6+ based on standard Python type hints.
+
+The key features are:
+
+* **Fast**: Very high performance, on par with **NodeJS** and **Go** (thanks to Starlette and Pydantic). [One of the fastest Python frameworks available](#performance).
+* **Fast to code**: Increase the speed to develop features by about 200% to 300%. *
+* **Fewer bugs**: Reduce about 40% of human (developer) induced errors. *
+* **Intuitive**: Great editor support. Completion everywhere. Less time debugging.
+* **Easy**: Designed to be easy to use and learn. Less time reading docs.
+* **Short**: Minimize code duplication. Multiple features from each parameter declaration. Fewer bugs.
+* **Robust**: Get production-ready code. With automatic interactive documentation.
+* **Standards-based**: Based on (and fully compatible with) the open standards for APIs: OpenAPI (previously known as Swagger) and JSON Schema.
+
+* estimation based on tests on an internal development team, building production applications.
+
+## Sponsors
+
+[sponsor logos]
+
+Other sponsors
+
+## Opinions
+
+"_[...] I'm using **FastAPI** a ton these days. [...] I'm actually planning to use it for all of my team's **ML services at Microsoft**. Some of them are getting integrated into the core **Windows** product and some **Office** products._"
+
Kabir Khan - Microsoft (ref)
+ +--- + +"_We adopted the **FastAPI** library to spawn a **REST** server that can be queried to obtain **predictions**. [for Ludwig]_" + +
Piero Molino, Yaroslav Dudin, and Sai Sumanth Miryala - Uber (ref)
+ +--- + +"_**Netflix** is pleased to announce the open-source release of our **crisis management** orchestration framework: **Dispatch**! [built with **FastAPI**]_" + +
Kevin Glisson, Marc Vilanova, Forest Monsen - Netflix (ref)
+ +--- + +"_I’m over the moon excited about **FastAPI**. It’s so fun!_" + +
Brian Okken - Python Bytes podcast host (ref)
+ +--- + +"_Honestly, what you've built looks super solid and polished. In many ways, it's what I wanted **Hug** to be - it's really inspiring to see someone build that._" + +
Timothy Crosley - Hug creator (ref)
+ +--- + +"_If you're looking to learn one **modern framework** for building REST APIs, check out **FastAPI** [...] It's fast, easy to use and easy to learn [...]_" + +"_We've switched over to **FastAPI** for our **APIs** [...] I think you'll like it [...]_" + +
Ines Montani - Matthew Honnibal - Explosion AI founders - spaCy creators (ref) - (ref)
+ +--- + +## **Typer**, the FastAPI of CLIs + + + +If you are building a CLI app to be used in the terminal instead of a web API, check out **Typer**. + +**Typer** is FastAPI's little sibling. And it's intended to be the **FastAPI of CLIs**. ⌨️ 🚀 + +## Requirements + +Python 3.6+ + +FastAPI stands on the shoulders of giants: + +* Starlette for the web parts. +* Pydantic for the data parts. + +## Installation + +
+ +```console +$ pip install fastapi + +---> 100% +``` + +
+
+You will also need an ASGI server for production, such as Uvicorn or Hypercorn.
+
+
+ +```console +$ pip install "uvicorn[standard]" + +---> 100% +``` + +
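+To verify the installation (an editor's addition, not part of the upstream README; it assumes the packages above installed cleanly), you can ask Python for the installed version, which this diff vendors as 0.81.0:
+
+```console
+$ python -c "import fastapi; print(fastapi.__version__)"
+0.81.0
+```
+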
+ +## Example + +### Create it + +* Create a file `main.py` with: + +```Python +from typing import Union + +from fastapi import FastAPI + +app = FastAPI() + + +@app.get("/") +def read_root(): + return {"Hello": "World"} + + +@app.get("/items/{item_id}") +def read_item(item_id: int, q: Union[str, None] = None): + return {"item_id": item_id, "q": q} +``` + +
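+As a quick way to exercise these two endpoints from Python (an editor's sketch, not part of the upstream README; it assumes Starlette's `requests`-based `TestClient`, which FastAPI re-exports as `fastapi.testclient.TestClient`):
+
+```Python
+from fastapi.testclient import TestClient
+
+from main import app  # the FastAPI() instance created above
+
+client = TestClient(app)
+
+# Exercises read_item() without starting a server.
+response = client.get("/items/5", params={"q": "somequery"})
+assert response.json() == {"item_id": 5, "q": "somequery"}
+```
+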
+
+**Or use `async def`...**
+
+If your code uses `async` / `await`, use `async def`:
+
+```Python hl_lines="9 14"
+from typing import Union
+
+from fastapi import FastAPI
+
+app = FastAPI()
+
+
+@app.get("/")
+async def read_root():
+    return {"Hello": "World"}
+
+
+@app.get("/items/{item_id}")
+async def read_item(item_id: int, q: Union[str, None] = None):
+    return {"item_id": item_id, "q": q}
+```
+
+**Note**:
+
+If you don't know, check the _"In a hurry?"_ section about `async` and `await` in the docs.
+
+ +### Run it + +Run the server with: + +
+ +```console +$ uvicorn main:app --reload + +INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit) +INFO: Started reloader process [28720] +INFO: Started server process [28722] +INFO: Waiting for application startup. +INFO: Application startup complete. +``` + +
+ +
+
+**About the command `uvicorn main:app --reload`...**
+
+The command `uvicorn main:app` refers to:
+
+* `main`: the file `main.py` (the Python "module").
+* `app`: the object created inside of `main.py` with the line `app = FastAPI()`.
+* `--reload`: make the server restart after code changes. Only do this for development.
+
+ +### Check it + +Open your browser at http://127.0.0.1:8000/items/5?q=somequery. + +You will see the JSON response as: + +```JSON +{"item_id": 5, "q": "somequery"} +``` + +You already created an API that: + +* Receives HTTP requests in the _paths_ `/` and `/items/{item_id}`. +* Both _paths_ take `GET` operations (also known as HTTP _methods_). +* The _path_ `/items/{item_id}` has a _path parameter_ `item_id` that should be an `int`. +* The _path_ `/items/{item_id}` has an optional `str` _query parameter_ `q`. + +### Interactive API docs + +Now go to http://127.0.0.1:8000/docs. + +You will see the automatic interactive API documentation (provided by Swagger UI): + +![Swagger UI](https://fastapi.tiangolo.com/img/index/index-01-swagger-ui-simple.png) + +### Alternative API docs + +And now, go to http://127.0.0.1:8000/redoc. + +You will see the alternative automatic documentation (provided by ReDoc): + +![ReDoc](https://fastapi.tiangolo.com/img/index/index-02-redoc-simple.png) + +## Example upgrade + +Now modify the file `main.py` to receive a body from a `PUT` request. + +Declare the body using standard Python types, thanks to Pydantic. + +```Python hl_lines="4 9-12 25-27" +from typing import Union + +from fastapi import FastAPI +from pydantic import BaseModel + +app = FastAPI() + + +class Item(BaseModel): + name: str + price: float + is_offer: Union[bool, None] = None + + +@app.get("/") +def read_root(): + return {"Hello": "World"} + + +@app.get("/items/{item_id}") +def read_item(item_id: int, q: Union[str, None] = None): + return {"item_id": item_id, "q": q} + + +@app.put("/items/{item_id}") +def update_item(item_id: int, item: Item): + return {"item_name": item.name, "item_id": item_id} +``` + +The server should reload automatically (because you added `--reload` to the `uvicorn` command above). + +### Interactive API docs upgrade + +Now go to http://127.0.0.1:8000/docs. + +* The interactive API documentation will be automatically updated, including the new body: + +![Swagger UI](https://fastapi.tiangolo.com/img/index/index-03-swagger-02.png) + +* Click on the button "Try it out", it allows you to fill the parameters and directly interact with the API: + +![Swagger UI interaction](https://fastapi.tiangolo.com/img/index/index-04-swagger-03.png) + +* Then click on the "Execute" button, the user interface will communicate with your API, send the parameters, get the results and show them on the screen: + +![Swagger UI interaction](https://fastapi.tiangolo.com/img/index/index-05-swagger-04.png) + +### Alternative API docs upgrade + +And now, go to http://127.0.0.1:8000/redoc. + +* The alternative documentation will also reflect the new query parameter and body: + +![ReDoc](https://fastapi.tiangolo.com/img/index/index-06-redoc-02.png) + +### Recap + +In summary, you declare **once** the types of parameters, body, etc. as function parameters. + +You do that with standard modern Python types. + +You don't have to learn a new syntax, the methods or classes of a specific library, etc. + +Just standard **Python 3.6+**. + +For example, for an `int`: + +```Python +item_id: int +``` + +or for a more complex `Item` model: + +```Python +item: Item +``` + +...and with that single declaration you get: + +* Editor support, including: + * Completion. + * Type checks. +* Validation of data: + * Automatic and clear errors when the data is invalid. + * Validation even for deeply nested JSON objects. +* Conversion of input data: coming from the network to Python data and types. Reading from: + * JSON. 
+ * Path parameters. + * Query parameters. + * Cookies. + * Headers. + * Forms. + * Files. +* Conversion of output data: converting from Python data and types to network data (as JSON): + * Convert Python types (`str`, `int`, `float`, `bool`, `list`, etc). + * `datetime` objects. + * `UUID` objects. + * Database models. + * ...and many more. +* Automatic interactive API documentation, including 2 alternative user interfaces: + * Swagger UI. + * ReDoc. + +--- + +Coming back to the previous code example, **FastAPI** will: + +* Validate that there is an `item_id` in the path for `GET` and `PUT` requests. +* Validate that the `item_id` is of type `int` for `GET` and `PUT` requests. + * If it is not, the client will see a useful, clear error. +* Check if there is an optional query parameter named `q` (as in `http://127.0.0.1:8000/items/foo?q=somequery`) for `GET` requests. + * As the `q` parameter is declared with `= None`, it is optional. + * Without the `None` it would be required (as is the body in the case with `PUT`). +* For `PUT` requests to `/items/{item_id}`, Read the body as JSON: + * Check that it has a required attribute `name` that should be a `str`. + * Check that it has a required attribute `price` that has to be a `float`. + * Check that it has an optional attribute `is_offer`, that should be a `bool`, if present. + * All this would also work for deeply nested JSON objects. +* Convert from and to JSON automatically. +* Document everything with OpenAPI, that can be used by: + * Interactive documentation systems. + * Automatic client code generation systems, for many languages. +* Provide 2 interactive documentation web interfaces directly. + +--- + +We just scratched the surface, but you already get the idea of how it all works. + +Try changing the line with: + +```Python + return {"item_name": item.name, "item_id": item_id} +``` + +...from: + +```Python + ... "item_name": item.name ... +``` + +...to: + +```Python + ... "item_price": item.price ... +``` + +...and see how your editor will auto-complete the attributes and know their types: + +![editor support](https://fastapi.tiangolo.com/img/vscode-completion.png) + +For a more complete example including more features, see the Tutorial - User Guide. + +**Spoiler alert**: the tutorial - user guide includes: + +* Declaration of **parameters** from other different places as: **headers**, **cookies**, **form fields** and **files**. +* How to set **validation constraints** as `maximum_length` or `regex`. +* A very powerful and easy to use **Dependency Injection** system. +* Security and authentication, including support for **OAuth2** with **JWT tokens** and **HTTP Basic** auth. +* More advanced (but equally easy) techniques for declaring **deeply nested JSON models** (thanks to Pydantic). +* **GraphQL** integration with Strawberry and other libraries. +* Many extra features (thanks to Starlette) as: + * **WebSockets** + * extremely easy tests based on `requests` and `pytest` + * **CORS** + * **Cookie Sessions** + * ...and more. + +## Performance + +Independent TechEmpower benchmarks show **FastAPI** applications running under Uvicorn as one of the fastest Python frameworks available, only below Starlette and Uvicorn themselves (used internally by FastAPI). (*) + +To understand more about it, see the section Benchmarks. + +## Optional Dependencies + +Used by Pydantic: + +* ujson - for faster JSON "parsing". +* email_validator - for email validation. + +Used by Starlette: + +* requests - Required if you want to use the `TestClient`. 
+* jinja2 - Required if you want to use the default template configuration. +* python-multipart - Required if you want to support form "parsing", with `request.form()`. +* itsdangerous - Required for `SessionMiddleware` support. +* pyyaml - Required for Starlette's `SchemaGenerator` support (you probably don't need it with FastAPI). +* ujson - Required if you want to use `UJSONResponse`. + +Used by FastAPI / Starlette: + +* uvicorn - for the server that loads and serves your application. +* orjson - Required if you want to use `ORJSONResponse`. + +You can install all of these with `pip install "fastapi[all]"`. + +## License + +This project is licensed under the terms of the MIT license. + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/RECORD new file mode 100644 index 00000000..88f57d5f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/RECORD @@ -0,0 +1,91 @@ +fastapi-0.81.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +fastapi-0.81.0.dist-info/LICENSE,sha256=Tsif_IFIW5f-xYSy1KlhAy7v_oNEU4lP2cEnSQbMdE4,1086 +fastapi-0.81.0.dist-info/METADATA,sha256=CD9zEFul1ZeqIjXG_CDdJOsBUO--4HnPgtRpq_8jSTs,25126 +fastapi-0.81.0.dist-info/RECORD,, +fastapi-0.81.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi-0.81.0.dist-info/WHEEL,sha256=4TfKIB_xu-04bc2iKz6_zFt-gEFEEDU_31HGhqzOCE8,81 +fastapi/__init__.py,sha256=zwiRX7r_siNxDDxGccYn9vMq5KoZEHagieRbpPmpiYI,1015 +fastapi/__pycache__/__init__.cpython-37.pyc,, +fastapi/__pycache__/applications.cpython-37.pyc,, +fastapi/__pycache__/background.cpython-37.pyc,, +fastapi/__pycache__/concurrency.cpython-37.pyc,, +fastapi/__pycache__/datastructures.cpython-37.pyc,, +fastapi/__pycache__/encoders.cpython-37.pyc,, +fastapi/__pycache__/exception_handlers.cpython-37.pyc,, +fastapi/__pycache__/exceptions.cpython-37.pyc,, +fastapi/__pycache__/logger.cpython-37.pyc,, +fastapi/__pycache__/param_functions.cpython-37.pyc,, +fastapi/__pycache__/params.cpython-37.pyc,, +fastapi/__pycache__/requests.cpython-37.pyc,, +fastapi/__pycache__/responses.cpython-37.pyc,, +fastapi/__pycache__/routing.cpython-37.pyc,, +fastapi/__pycache__/staticfiles.cpython-37.pyc,, +fastapi/__pycache__/templating.cpython-37.pyc,, +fastapi/__pycache__/testclient.cpython-37.pyc,, +fastapi/__pycache__/types.cpython-37.pyc,, +fastapi/__pycache__/utils.cpython-37.pyc,, +fastapi/__pycache__/websockets.cpython-37.pyc,, +fastapi/applications.py,sha256=DGDGZRPpNW_KRKMeyoo5gl4YuE1c9uCNWigLmsqXJ30,38062 +fastapi/background.py,sha256=HtN5_pJJrOdalSbuGSMKJAPNWUU5h7rY_BXXubu7-IQ,76 +fastapi/concurrency.py,sha256=G9Q9--9t5362vQuzLzB0pL8e6BbxcVU9EpBhVyaEGJ8,1078 +fastapi/datastructures.py,sha256=oW6xuU0C-sBwbcyXI-MlBO0tSS4BSPB2lYUa1yCw8-A,1905 +fastapi/dependencies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/dependencies/__pycache__/__init__.cpython-37.pyc,, +fastapi/dependencies/__pycache__/models.cpython-37.pyc,, +fastapi/dependencies/__pycache__/utils.cpython-37.pyc,, +fastapi/dependencies/models.py,sha256=zNbioxICuOeb-9ADDVQ45hUHOC0PBtPVEfVU3f1l_nc,2494 +fastapi/dependencies/utils.py,sha256=zsbThdKoayRv0pWplZ6jWjK9ekIoix036NO_nv565xc,27373 +fastapi/encoders.py,sha256=0Zd7Ad60YMVzB5A0rcHkUZwwriY9In7FY6ziyX5BZpw,5885 +fastapi/exception_handlers.py,sha256=UVYCCe4qt5-5_NuQ3SxTXjDvOdKMHiTfcLp3RUKXhg8,912 
+fastapi/exceptions.py,sha256=Wy1sP3EisJohtxr-uKoH58QumPWmqHp6cpXOD3TTPOs,1117 +fastapi/logger.py,sha256=I9NNi3ov8AcqbsbC9wl1X-hdItKgYt2XTrx1f99Zpl4,54 +fastapi/middleware/__init__.py,sha256=oQDxiFVcc1fYJUOIFvphnK7pTT5kktmfL32QXpBFvvo,58 +fastapi/middleware/__pycache__/__init__.cpython-37.pyc,, +fastapi/middleware/__pycache__/asyncexitstack.cpython-37.pyc,, +fastapi/middleware/__pycache__/cors.cpython-37.pyc,, +fastapi/middleware/__pycache__/gzip.cpython-37.pyc,, +fastapi/middleware/__pycache__/httpsredirect.cpython-37.pyc,, +fastapi/middleware/__pycache__/trustedhost.cpython-37.pyc,, +fastapi/middleware/__pycache__/wsgi.cpython-37.pyc,, +fastapi/middleware/asyncexitstack.py,sha256=72XjQmQ_tB_tTs9xOc0akXF_7TwZUPdyfc8gsN5LV8E,1197 +fastapi/middleware/cors.py,sha256=ynwjWQZoc_vbhzZ3_ZXceoaSrslHFHPdoM52rXr0WUU,79 +fastapi/middleware/gzip.py,sha256=xM5PcsH8QlAimZw4VDvcmTnqQamslThsfe3CVN2voa0,79 +fastapi/middleware/httpsredirect.py,sha256=rL8eXMnmLijwVkH7_400zHri1AekfeBd6D6qs8ix950,115 +fastapi/middleware/trustedhost.py,sha256=eE5XGRxGa7c5zPnMJDGp3BxaL25k5iVQlhnv-Pk0Pss,109 +fastapi/middleware/wsgi.py,sha256=Z3Ue-7wni4lUZMvH3G9ek__acgYdJstbnpZX_HQAboY,79 +fastapi/openapi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/openapi/__pycache__/__init__.cpython-37.pyc,, +fastapi/openapi/__pycache__/constants.cpython-37.pyc,, +fastapi/openapi/__pycache__/docs.cpython-37.pyc,, +fastapi/openapi/__pycache__/models.cpython-37.pyc,, +fastapi/openapi/__pycache__/utils.cpython-37.pyc,, +fastapi/openapi/constants.py,sha256=mWxYBjED6PU-tQ9X227Qkq2SsW2cv-C1jYFKt63xxEs,107 +fastapi/openapi/docs.py,sha256=JBRaq7EEmeC-xoRSRFj6qZWQxfOZW_jvTw0r-PiKcZ4,6532 +fastapi/openapi/models.py,sha256=_XWDBU4Zlp5M9V6YI1kmXbqYKwK_xZxaqIA_DhLwqHk,11027 +fastapi/openapi/utils.py,sha256=NUrPv65N8976sqfG1kxHiHJrPYxT53NkjCWgmuyAOF0,18409 +fastapi/param_functions.py,sha256=mhV6aNZmXuf_A7rZ830o3V-DFqbonIDRs_prDTetLs4,7521 +fastapi/params.py,sha256=LRoO2H1XBBIfBGB82gHtfnXhDZiDz-7CIordN3FoU1I,10600 +fastapi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/requests.py,sha256=zayepKFcienBllv3snmWI20Gk0oHNVLU4DDhqXBb4LU,142 +fastapi/responses.py,sha256=K9Gzj0E5GfwTgLiuCE-BuiXn9JdqGJHeXBMkvd8lFC8,1196 +fastapi/routing.py,sha256=gq17oux-hW8G5sr4ZhBRl-m6R3mx0jJRDHCMEMNTPcU,53459 +fastapi/security/__init__.py,sha256=bO8pNmxqVRXUjfl2mOKiVZLn0FpBQ61VUYVjmppnbJw,881 +fastapi/security/__pycache__/__init__.cpython-37.pyc,, +fastapi/security/__pycache__/api_key.cpython-37.pyc,, +fastapi/security/__pycache__/base.cpython-37.pyc,, +fastapi/security/__pycache__/http.cpython-37.pyc,, +fastapi/security/__pycache__/oauth2.cpython-37.pyc,, +fastapi/security/__pycache__/open_id_connect_url.cpython-37.pyc,, +fastapi/security/__pycache__/utils.cpython-37.pyc,, +fastapi/security/api_key.py,sha256=NbVpS9TxDOaipoZa8-SREHyMtTcM3bmy5szMiQxEX9s,2793 +fastapi/security/base.py,sha256=dl4pvbC-RxjfbWgPtCWd8MVU-7CB2SZ22rJDXVCXO6c,141 +fastapi/security/http.py,sha256=ZSy3DFKFDLa3-I4vwsY1r8hQB_VrtAXw4-fMJauZIK0,5984 +fastapi/security/oauth2.py,sha256=1NPA12T1_r2uo4iQWxJCKjUqVVdb532YDvX9e3PVpcE,8212 +fastapi/security/open_id_connect_url.py,sha256=iikzuJCz_DG44Q77VrupqSoCbJYaiXkuo_W-kdmAzeo,1145 +fastapi/security/utils.py,sha256=izlh-HBaL1VnJeOeRTQnyNgI3hgTFs73eCyLy-snb4A,266 +fastapi/staticfiles.py,sha256=iirGIt3sdY2QZXd36ijs3Cj-T0FuGFda3cd90kM9Ikw,69 +fastapi/templating.py,sha256=4zsuTWgcjcEainMJFAlW6-gnslm6AgOS1SiiDWfmQxk,76 +fastapi/testclient.py,sha256=nBvaAmX66YldReJNZXPOk1sfuo2Q6hs8bOvIaCep6LQ,66 
+fastapi/types.py,sha256=r6MngTHzkZOP9lzXgduje9yeZe5EInWAzCLuRJlhIuE,118 +fastapi/utils.py,sha256=g5JDiJ4vwc-l2ffpLdkkBDEjJJz3dqZbQ2cwz-yNVwI,6594 +fastapi/websockets.py,sha256=SroIkqE-lfChvtRP3mFaNKKtD6TmePDWBZtQfgM4noo,148 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/REQUESTED b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/WHEEL b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/WHEEL new file mode 100644 index 00000000..668ba4d0 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi-0.81.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.7.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/__init__.py new file mode 100644 index 00000000..3688ec89 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/__init__.py @@ -0,0 +1,24 @@ +"""FastAPI framework, high performance, easy to learn, fast to code, ready for production""" + +__version__ = "0.81.0" + +from starlette import status as status + +from .applications import FastAPI as FastAPI +from .background import BackgroundTasks as BackgroundTasks +from .datastructures import UploadFile as UploadFile +from .exceptions import HTTPException as HTTPException +from .param_functions import Body as Body +from .param_functions import Cookie as Cookie +from .param_functions import Depends as Depends +from .param_functions import File as File +from .param_functions import Form as Form +from .param_functions import Header as Header +from .param_functions import Path as Path +from .param_functions import Query as Query +from .param_functions import Security as Security +from .requests import Request as Request +from .responses import Response as Response +from .routing import APIRouter as APIRouter +from .websockets import WebSocket as WebSocket +from .websockets import WebSocketDisconnect as WebSocketDisconnect diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/applications.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/applications.py new file mode 100644 index 00000000..ac6050ca --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/applications.py @@ -0,0 +1,871 @@ +from enum import Enum +from typing import ( + Any, + Awaitable, + Callable, + Coroutine, + Dict, + List, + Optional, + Sequence, + Type, + Union, +) + +from fastapi import routing +from fastapi.datastructures import Default, DefaultPlaceholder +from fastapi.encoders import DictIntStrAny, SetIntStr +from fastapi.exception_handlers import ( + http_exception_handler, + request_validation_exception_handler, +) +from fastapi.exceptions import RequestValidationError +from fastapi.logger import logger +from fastapi.middleware.asyncexitstack import AsyncExitStackMiddleware +from fastapi.openapi.docs import ( + get_redoc_html, + get_swagger_ui_html, + get_swagger_ui_oauth2_redirect_html, +) +from fastapi.openapi.utils import get_openapi +from fastapi.params import Depends +from fastapi.types import DecoratedCallable +from fastapi.utils import generate_unique_id +from starlette.applications import Starlette +from 
starlette.datastructures import State +from starlette.exceptions import ExceptionMiddleware, HTTPException +from starlette.middleware import Middleware +from starlette.middleware.errors import ServerErrorMiddleware +from starlette.requests import Request +from starlette.responses import HTMLResponse, JSONResponse, Response +from starlette.routing import BaseRoute +from starlette.types import ASGIApp, Receive, Scope, Send + + +class FastAPI(Starlette): + def __init__( + self, + *, + debug: bool = False, + routes: Optional[List[BaseRoute]] = None, + title: str = "FastAPI", + description: str = "", + version: str = "0.1.0", + openapi_url: Optional[str] = "/openapi.json", + openapi_tags: Optional[List[Dict[str, Any]]] = None, + servers: Optional[List[Dict[str, Union[str, Any]]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + default_response_class: Type[Response] = Default(JSONResponse), + docs_url: Optional[str] = "/docs", + redoc_url: Optional[str] = "/redoc", + swagger_ui_oauth2_redirect_url: Optional[str] = "/docs/oauth2-redirect", + swagger_ui_init_oauth: Optional[Dict[str, Any]] = None, + middleware: Optional[Sequence[Middleware]] = None, + exception_handlers: Optional[ + Dict[ + Union[int, Type[Exception]], + Callable[[Request, Any], Coroutine[Any, Any, Response]], + ] + ] = None, + on_startup: Optional[Sequence[Callable[[], Any]]] = None, + on_shutdown: Optional[Sequence[Callable[[], Any]]] = None, + terms_of_service: Optional[str] = None, + contact: Optional[Dict[str, Union[str, Any]]] = None, + license_info: Optional[Dict[str, Union[str, Any]]] = None, + openapi_prefix: str = "", + root_path: str = "", + root_path_in_servers: bool = True, + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + callbacks: Optional[List[BaseRoute]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + swagger_ui_parameters: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + **extra: Any, + ) -> None: + self._debug: bool = debug + self.title = title + self.description = description + self.version = version + self.terms_of_service = terms_of_service + self.contact = contact + self.license_info = license_info + self.openapi_url = openapi_url + self.openapi_tags = openapi_tags + self.root_path_in_servers = root_path_in_servers + self.docs_url = docs_url + self.redoc_url = redoc_url + self.swagger_ui_oauth2_redirect_url = swagger_ui_oauth2_redirect_url + self.swagger_ui_init_oauth = swagger_ui_init_oauth + self.swagger_ui_parameters = swagger_ui_parameters + self.servers = servers or [] + self.extra = extra + self.openapi_version = "3.0.2" + self.openapi_schema: Optional[Dict[str, Any]] = None + if self.openapi_url: + assert self.title, "A title must be provided for OpenAPI, e.g.: 'My API'" + assert self.version, "A version must be provided for OpenAPI, e.g.: '2.1.0'" + # TODO: remove when discarding the openapi_prefix parameter + if openapi_prefix: + logger.warning( + '"openapi_prefix" has been deprecated in favor of "root_path", which ' + "follows more closely the ASGI standard, is simpler, and more " + "automatic. 
Check the docs at " + "https://fastapi.tiangolo.com/advanced/sub-applications/" + ) + self.root_path = root_path or openapi_prefix + self.state: State = State() + self.dependency_overrides: Dict[Callable[..., Any], Callable[..., Any]] = {} + self.router: routing.APIRouter = routing.APIRouter( + routes=routes, + dependency_overrides_provider=self, + on_startup=on_startup, + on_shutdown=on_shutdown, + default_response_class=default_response_class, + dependencies=dependencies, + callbacks=callbacks, + deprecated=deprecated, + include_in_schema=include_in_schema, + responses=responses, + generate_unique_id_function=generate_unique_id_function, + ) + self.exception_handlers: Dict[ + Any, Callable[[Request, Any], Union[Response, Awaitable[Response]]] + ] = ({} if exception_handlers is None else dict(exception_handlers)) + self.exception_handlers.setdefault(HTTPException, http_exception_handler) + self.exception_handlers.setdefault( + RequestValidationError, request_validation_exception_handler + ) + + self.user_middleware: List[Middleware] = ( + [] if middleware is None else list(middleware) + ) + self.middleware_stack: ASGIApp = self.build_middleware_stack() + self.setup() + + def build_middleware_stack(self) -> ASGIApp: + # Duplicate/override from Starlette to add AsyncExitStackMiddleware + # inside of ExceptionMiddleware, inside of custom user middlewares + debug = self.debug + error_handler = None + exception_handlers = {} + + for key, value in self.exception_handlers.items(): + if key in (500, Exception): + error_handler = value + else: + exception_handlers[key] = value + + middleware = ( + [Middleware(ServerErrorMiddleware, handler=error_handler, debug=debug)] + + self.user_middleware + + [ + Middleware( + ExceptionMiddleware, handlers=exception_handlers, debug=debug + ), + # Add FastAPI-specific AsyncExitStackMiddleware for dependencies with + # contextvars. + # This needs to happen after user middlewares because those create a + # new contextvars context copy by using a new AnyIO task group. + # The initial part of dependencies with yield is executed in the + # FastAPI code, inside all the middlewares, but the teardown part + # (after yield) is executed in the AsyncExitStack in this middleware, + # if the AsyncExitStack lived outside of the custom middlewares and + # contextvars were set in a dependency with yield in that internal + # contextvars context, the values would not be available in the + # outside context of the AsyncExitStack. + # By putting the middleware and the AsyncExitStack here, inside all + # user middlewares, the code before and after yield in dependencies + # with yield is executed in the same contextvars context, so all values + # set in contextvars before yield is still available after yield as + # would be expected. + # Additionally, by having this AsyncExitStack here, after the + # ExceptionMiddleware, now dependencies can catch handled exceptions, + # e.g. HTTPException, to customize the teardown code (e.g. DB session + # rollback). 
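+ # (Editorial sketch, not part of the upstream FastAPI 0.81.0 source:)
+ # the effective middleware onion, outermost to innermost, ends up as
+ # ServerErrorMiddleware -> user middleware -> ExceptionMiddleware ->
+ # AsyncExitStackMiddleware -> self.router, because the reversed() loop
+ # below wraps the app starting from the last entry of this list.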
+ Middleware(AsyncExitStackMiddleware), + ] + ) + + app = self.router + for cls, options in reversed(middleware): + app = cls(app=app, **options) + return app + + def openapi(self) -> Dict[str, Any]: + if not self.openapi_schema: + self.openapi_schema = get_openapi( + title=self.title, + version=self.version, + openapi_version=self.openapi_version, + description=self.description, + terms_of_service=self.terms_of_service, + contact=self.contact, + license_info=self.license_info, + routes=self.routes, + tags=self.openapi_tags, + servers=self.servers, + ) + return self.openapi_schema + + def setup(self) -> None: + if self.openapi_url: + urls = (server_data.get("url") for server_data in self.servers) + server_urls = {url for url in urls if url} + + async def openapi(req: Request) -> JSONResponse: + root_path = req.scope.get("root_path", "").rstrip("/") + if root_path not in server_urls: + if root_path and self.root_path_in_servers: + self.servers.insert(0, {"url": root_path}) + server_urls.add(root_path) + return JSONResponse(self.openapi()) + + self.add_route(self.openapi_url, openapi, include_in_schema=False) + if self.openapi_url and self.docs_url: + + async def swagger_ui_html(req: Request) -> HTMLResponse: + root_path = req.scope.get("root_path", "").rstrip("/") + openapi_url = root_path + self.openapi_url + oauth2_redirect_url = self.swagger_ui_oauth2_redirect_url + if oauth2_redirect_url: + oauth2_redirect_url = root_path + oauth2_redirect_url + return get_swagger_ui_html( + openapi_url=openapi_url, + title=self.title + " - Swagger UI", + oauth2_redirect_url=oauth2_redirect_url, + init_oauth=self.swagger_ui_init_oauth, + swagger_ui_parameters=self.swagger_ui_parameters, + ) + + self.add_route(self.docs_url, swagger_ui_html, include_in_schema=False) + + if self.swagger_ui_oauth2_redirect_url: + + async def swagger_ui_redirect(req: Request) -> HTMLResponse: + return get_swagger_ui_oauth2_redirect_html() + + self.add_route( + self.swagger_ui_oauth2_redirect_url, + swagger_ui_redirect, + include_in_schema=False, + ) + if self.openapi_url and self.redoc_url: + + async def redoc_html(req: Request) -> HTMLResponse: + root_path = req.scope.get("root_path", "").rstrip("/") + openapi_url = root_path + self.openapi_url + return get_redoc_html( + openapi_url=openapi_url, title=self.title + " - ReDoc" + ) + + self.add_route(self.redoc_url, redoc_html, include_in_schema=False) + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if self.root_path: + scope["root_path"] = self.root_path + await super().__call__(scope, receive, send) + + def add_api_route( + self, + path: str, + endpoint: Callable[..., Coroutine[Any, Any, Response]], + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[List[str]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + 
response_class: Union[Type[Response], DefaultPlaceholder] = Default( + JSONResponse + ), + name: Optional[str] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> None: + self.router.add_api_route( + path, + endpoint=endpoint, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def api_route( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[List[str]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.router.add_api_route( + path, + func, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + return func + + return decorator + + def add_api_websocket_route( + self, path: str, endpoint: Callable[..., Any], name: Optional[str] = None + ) -> None: + self.router.add_api_websocket_route(path, endpoint, name=name) + + def websocket( + self, path: str, name: Optional[str] = None + ) -> 
Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_api_websocket_route(path, func, name=name) + return func + + return decorator + + def include_router( + self, + router: routing.APIRouter, + *, + prefix: str = "", + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + default_response_class: Type[Response] = Default(JSONResponse), + callbacks: Optional[List[BaseRoute]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> None: + self.router.include_router( + router, + prefix=prefix, + tags=tags, + dependencies=dependencies, + responses=responses, + deprecated=deprecated, + include_in_schema=include_in_schema, + default_response_class=default_response_class, + callbacks=callbacks, + generate_unique_id_function=generate_unique_id_function, + ) + + def get( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.get( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def put( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + 
operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.put( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def post( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.post( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + 
generate_unique_id_function=generate_unique_id_function, + ) + + def delete( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.delete( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + operation_id=operation_id, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def options( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.options( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + 
deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def head( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.head( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def patch( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + 
callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.patch( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def trace( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.router.trace( + path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/background.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/background.py new file mode 100644 index 00000000..dd3bbe24 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/background.py @@ -0,0 +1 @@ +from starlette.background import BackgroundTasks as BackgroundTasks # noqa diff --git 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/concurrency.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/concurrency.py new file mode 100644 index 00000000..becac3f3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/concurrency.py @@ -0,0 +1,32 @@ +import sys +from typing import AsyncGenerator, ContextManager, TypeVar + +from starlette.concurrency import iterate_in_threadpool as iterate_in_threadpool # noqa +from starlette.concurrency import run_in_threadpool as run_in_threadpool # noqa +from starlette.concurrency import ( # noqa + run_until_first_complete as run_until_first_complete, +) + +if sys.version_info >= (3, 7): + from contextlib import AsyncExitStack as AsyncExitStack + from contextlib import asynccontextmanager as asynccontextmanager +else: + from contextlib2 import AsyncExitStack as AsyncExitStack # noqa + from contextlib2 import asynccontextmanager as asynccontextmanager # noqa + + +_T = TypeVar("_T") + + +@asynccontextmanager +async def contextmanager_in_threadpool( + cm: ContextManager[_T], +) -> AsyncGenerator[_T, None]: + try: + yield await run_in_threadpool(cm.__enter__) + except Exception as e: + ok: bool = await run_in_threadpool(cm.__exit__, type(e), e, None) + if not ok: + raise e + else: + await run_in_threadpool(cm.__exit__, None, None, None) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/datastructures.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/datastructures.py new file mode 100644 index 00000000..b20a25ab --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/datastructures.py @@ -0,0 +1,56 @@ +from typing import Any, Callable, Dict, Iterable, Type, TypeVar + +from starlette.datastructures import URL as URL # noqa: F401 +from starlette.datastructures import Address as Address # noqa: F401 +from starlette.datastructures import FormData as FormData # noqa: F401 +from starlette.datastructures import Headers as Headers # noqa: F401 +from starlette.datastructures import QueryParams as QueryParams # noqa: F401 +from starlette.datastructures import State as State # noqa: F401 +from starlette.datastructures import UploadFile as StarletteUploadFile + + +class UploadFile(StarletteUploadFile): + @classmethod + def __get_validators__(cls: Type["UploadFile"]) -> Iterable[Callable[..., Any]]: + yield cls.validate + + @classmethod + def validate(cls: Type["UploadFile"], v: Any) -> Any: + if not isinstance(v, StarletteUploadFile): + raise ValueError(f"Expected UploadFile, received: {type(v)}") + return v + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update({"type": "string", "format": "binary"}) + + +class DefaultPlaceholder: + """ + You shouldn't use this class directly. + + It's used internally to recognize when a default value has been overwritten, even + if the overridden default value was truthy. + """ + + def __init__(self, value: Any): + self.value = value + + def __bool__(self) -> bool: + return bool(self.value) + + def __eq__(self, o: object) -> bool: + return isinstance(o, DefaultPlaceholder) and o.value == self.value + + +DefaultType = TypeVar("DefaultType") + + +def Default(value: DefaultType) -> DefaultType: + """ + You shouldn't use this function directly. + + It's used internally to recognize when a default value has been overwritten, even + if the overridden default value was truthy. 
+ """ + return DefaultPlaceholder(value) # type: ignore diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/dependencies/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/dependencies/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/dependencies/models.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/dependencies/models.py new file mode 100644 index 00000000..443590b9 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/dependencies/models.py @@ -0,0 +1,58 @@ +from typing import Any, Callable, List, Optional, Sequence + +from fastapi.security.base import SecurityBase +from pydantic.fields import ModelField + + +class SecurityRequirement: + def __init__( + self, security_scheme: SecurityBase, scopes: Optional[Sequence[str]] = None + ): + self.security_scheme = security_scheme + self.scopes = scopes + + +class Dependant: + def __init__( + self, + *, + path_params: Optional[List[ModelField]] = None, + query_params: Optional[List[ModelField]] = None, + header_params: Optional[List[ModelField]] = None, + cookie_params: Optional[List[ModelField]] = None, + body_params: Optional[List[ModelField]] = None, + dependencies: Optional[List["Dependant"]] = None, + security_schemes: Optional[List[SecurityRequirement]] = None, + name: Optional[str] = None, + call: Optional[Callable[..., Any]] = None, + request_param_name: Optional[str] = None, + websocket_param_name: Optional[str] = None, + http_connection_param_name: Optional[str] = None, + response_param_name: Optional[str] = None, + background_tasks_param_name: Optional[str] = None, + security_scopes_param_name: Optional[str] = None, + security_scopes: Optional[List[str]] = None, + use_cache: bool = True, + path: Optional[str] = None, + ) -> None: + self.path_params = path_params or [] + self.query_params = query_params or [] + self.header_params = header_params or [] + self.cookie_params = cookie_params or [] + self.body_params = body_params or [] + self.dependencies = dependencies or [] + self.security_requirements = security_schemes or [] + self.request_param_name = request_param_name + self.websocket_param_name = websocket_param_name + self.http_connection_param_name = http_connection_param_name + self.response_param_name = response_param_name + self.background_tasks_param_name = background_tasks_param_name + self.security_scopes = security_scopes + self.security_scopes_param_name = security_scopes_param_name + self.name = name + self.call = call + self.use_cache = use_cache + # Store the path to be able to re-generate a dependable from it in overrides + self.path = path + # Save the cache key at creation to optimize performance + self.cache_key = (self.call, tuple(sorted(set(self.security_scopes or [])))) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/dependencies/utils.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/dependencies/utils.py new file mode 100644 index 00000000..d098b65f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/dependencies/utils.py @@ -0,0 +1,756 @@ +import dataclasses +import inspect +from contextlib import contextmanager +from copy import deepcopy +from typing import ( + Any, + Callable, + Coroutine, + Dict, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import anyio +from fastapi import params +from 
fastapi.concurrency import ( + AsyncExitStack, + asynccontextmanager, + contextmanager_in_threadpool, +) +from fastapi.dependencies.models import Dependant, SecurityRequirement +from fastapi.logger import logger +from fastapi.security.base import SecurityBase +from fastapi.security.oauth2 import OAuth2, SecurityScopes +from fastapi.security.open_id_connect_url import OpenIdConnect +from fastapi.utils import create_response_field, get_path_param_names +from pydantic import BaseModel, create_model +from pydantic.error_wrappers import ErrorWrapper +from pydantic.errors import MissingError +from pydantic.fields import ( + SHAPE_FROZENSET, + SHAPE_LIST, + SHAPE_SEQUENCE, + SHAPE_SET, + SHAPE_SINGLETON, + SHAPE_TUPLE, + SHAPE_TUPLE_ELLIPSIS, + FieldInfo, + ModelField, + Required, + Undefined, +) +from pydantic.schema import get_annotation_from_field_info +from pydantic.typing import ForwardRef, evaluate_forwardref +from pydantic.utils import lenient_issubclass +from starlette.background import BackgroundTasks +from starlette.concurrency import run_in_threadpool +from starlette.datastructures import FormData, Headers, QueryParams, UploadFile +from starlette.requests import HTTPConnection, Request +from starlette.responses import Response +from starlette.websockets import WebSocket + +sequence_shapes = { + SHAPE_LIST, + SHAPE_SET, + SHAPE_FROZENSET, + SHAPE_TUPLE, + SHAPE_SEQUENCE, + SHAPE_TUPLE_ELLIPSIS, +} +sequence_types = (list, set, tuple) +sequence_shape_to_type = { + SHAPE_LIST: list, + SHAPE_SET: set, + SHAPE_TUPLE: tuple, + SHAPE_SEQUENCE: list, + SHAPE_TUPLE_ELLIPSIS: list, +} + + +multipart_not_installed_error = ( + 'Form data requires "python-multipart" to be installed. \n' + 'You can install "python-multipart" with: \n\n' + "pip install python-multipart\n" +) +multipart_incorrect_install_error = ( + 'Form data requires "python-multipart" to be installed. ' + 'It seems you installed "multipart" instead. 
\n' + 'You can remove "multipart" with: \n\n' + "pip uninstall multipart\n\n" + 'And then install "python-multipart" with: \n\n' + "pip install python-multipart\n" +) + + +def check_file_field(field: ModelField) -> None: + field_info = field.field_info + if isinstance(field_info, params.Form): + try: + # __version__ is available in both multiparts, and can be mocked + from multipart import __version__ # type: ignore + + assert __version__ + try: + # parse_options_header is only available in the right multipart + from multipart.multipart import parse_options_header # type: ignore + + assert parse_options_header + except ImportError: + logger.error(multipart_incorrect_install_error) + raise RuntimeError(multipart_incorrect_install_error) + except ImportError: + logger.error(multipart_not_installed_error) + raise RuntimeError(multipart_not_installed_error) + + +def get_param_sub_dependant( + *, param: inspect.Parameter, path: str, security_scopes: Optional[List[str]] = None +) -> Dependant: + depends: params.Depends = param.default + if depends.dependency: + dependency = depends.dependency + else: + dependency = param.annotation + return get_sub_dependant( + depends=depends, + dependency=dependency, + path=path, + name=param.name, + security_scopes=security_scopes, + ) + + +def get_parameterless_sub_dependant(*, depends: params.Depends, path: str) -> Dependant: + assert callable( + depends.dependency + ), "A parameter-less dependency must have a callable dependency" + return get_sub_dependant(depends=depends, dependency=depends.dependency, path=path) + + +def get_sub_dependant( + *, + depends: params.Depends, + dependency: Callable[..., Any], + path: str, + name: Optional[str] = None, + security_scopes: Optional[List[str]] = None, +) -> Dependant: + security_requirement = None + security_scopes = security_scopes or [] + if isinstance(depends, params.Security): + dependency_scopes = depends.scopes + security_scopes.extend(dependency_scopes) + if isinstance(dependency, SecurityBase): + use_scopes: List[str] = [] + if isinstance(dependency, (OAuth2, OpenIdConnect)): + use_scopes = security_scopes + security_requirement = SecurityRequirement( + security_scheme=dependency, scopes=use_scopes + ) + sub_dependant = get_dependant( + path=path, + call=dependency, + name=name, + security_scopes=security_scopes, + use_cache=depends.use_cache, + ) + if security_requirement: + sub_dependant.security_requirements.append(security_requirement) + return sub_dependant + + +CacheKey = Tuple[Optional[Callable[..., Any]], Tuple[str, ...]] + + +def get_flat_dependant( + dependant: Dependant, + *, + skip_repeats: bool = False, + visited: Optional[List[CacheKey]] = None, +) -> Dependant: + if visited is None: + visited = [] + visited.append(dependant.cache_key) + + flat_dependant = Dependant( + path_params=dependant.path_params.copy(), + query_params=dependant.query_params.copy(), + header_params=dependant.header_params.copy(), + cookie_params=dependant.cookie_params.copy(), + body_params=dependant.body_params.copy(), + security_schemes=dependant.security_requirements.copy(), + use_cache=dependant.use_cache, + path=dependant.path, + ) + for sub_dependant in dependant.dependencies: + if skip_repeats and sub_dependant.cache_key in visited: + continue + flat_sub = get_flat_dependant( + sub_dependant, skip_repeats=skip_repeats, visited=visited + ) + flat_dependant.path_params.extend(flat_sub.path_params) + flat_dependant.query_params.extend(flat_sub.query_params) + 
flat_dependant.header_params.extend(flat_sub.header_params) + flat_dependant.cookie_params.extend(flat_sub.cookie_params) + flat_dependant.body_params.extend(flat_sub.body_params) + flat_dependant.security_requirements.extend(flat_sub.security_requirements) + return flat_dependant + + +def get_flat_params(dependant: Dependant) -> List[ModelField]: + flat_dependant = get_flat_dependant(dependant, skip_repeats=True) + return ( + flat_dependant.path_params + + flat_dependant.query_params + + flat_dependant.header_params + + flat_dependant.cookie_params + ) + + +def is_scalar_field(field: ModelField) -> bool: + field_info = field.field_info + if not ( + field.shape == SHAPE_SINGLETON + and not lenient_issubclass(field.type_, BaseModel) + and not lenient_issubclass(field.type_, sequence_types + (dict,)) + and not dataclasses.is_dataclass(field.type_) + and not isinstance(field_info, params.Body) + ): + return False + if field.sub_fields: + if not all(is_scalar_field(f) for f in field.sub_fields): + return False + return True + + +def is_scalar_sequence_field(field: ModelField) -> bool: + if (field.shape in sequence_shapes) and not lenient_issubclass( + field.type_, BaseModel + ): + if field.sub_fields is not None: + for sub_field in field.sub_fields: + if not is_scalar_field(sub_field): + return False + return True + if lenient_issubclass(field.type_, sequence_types): + return True + return False + + +def get_typed_signature(call: Callable[..., Any]) -> inspect.Signature: + signature = inspect.signature(call) + globalns = getattr(call, "__globals__", {}) + typed_params = [ + inspect.Parameter( + name=param.name, + kind=param.kind, + default=param.default, + annotation=get_typed_annotation(param, globalns), + ) + for param in signature.parameters.values() + ] + typed_signature = inspect.Signature(typed_params) + return typed_signature + + +def get_typed_annotation(param: inspect.Parameter, globalns: Dict[str, Any]) -> Any: + annotation = param.annotation + if isinstance(annotation, str): + annotation = ForwardRef(annotation) + annotation = evaluate_forwardref(annotation, globalns, globalns) + return annotation + + +def get_dependant( + *, + path: str, + call: Callable[..., Any], + name: Optional[str] = None, + security_scopes: Optional[List[str]] = None, + use_cache: bool = True, +) -> Dependant: + path_param_names = get_path_param_names(path) + endpoint_signature = get_typed_signature(call) + signature_params = endpoint_signature.parameters + dependant = Dependant( + call=call, + name=name, + path=path, + security_scopes=security_scopes, + use_cache=use_cache, + ) + for param_name, param in signature_params.items(): + if isinstance(param.default, params.Depends): + sub_dependant = get_param_sub_dependant( + param=param, path=path, security_scopes=security_scopes + ) + dependant.dependencies.append(sub_dependant) + continue + if add_non_field_param_to_dependency(param=param, dependant=dependant): + continue + param_field = get_param_field( + param=param, default_field_info=params.Query, param_name=param_name + ) + if param_name in path_param_names: + assert is_scalar_field( + field=param_field + ), "Path params must be of one of the supported types" + ignore_default = not isinstance(param.default, params.Path) + param_field = get_param_field( + param=param, + param_name=param_name, + default_field_info=params.Path, + force_type=params.ParamTypes.path, + ignore_default=ignore_default, + ) + add_param_to_fields(field=param_field, dependant=dependant) + elif is_scalar_field(field=param_field): + 
add_param_to_fields(field=param_field, dependant=dependant) + elif isinstance( + param.default, (params.Query, params.Header) + ) and is_scalar_sequence_field(param_field): + add_param_to_fields(field=param_field, dependant=dependant) + else: + field_info = param_field.field_info + assert isinstance( + field_info, params.Body + ), f"Param: {param_field.name} can only be a request body, using Body()" + dependant.body_params.append(param_field) + return dependant + + +def add_non_field_param_to_dependency( + *, param: inspect.Parameter, dependant: Dependant +) -> Optional[bool]: + if lenient_issubclass(param.annotation, Request): + dependant.request_param_name = param.name + return True + elif lenient_issubclass(param.annotation, WebSocket): + dependant.websocket_param_name = param.name + return True + elif lenient_issubclass(param.annotation, HTTPConnection): + dependant.http_connection_param_name = param.name + return True + elif lenient_issubclass(param.annotation, Response): + dependant.response_param_name = param.name + return True + elif lenient_issubclass(param.annotation, BackgroundTasks): + dependant.background_tasks_param_name = param.name + return True + elif lenient_issubclass(param.annotation, SecurityScopes): + dependant.security_scopes_param_name = param.name + return True + return None + + +def get_param_field( + *, + param: inspect.Parameter, + param_name: str, + default_field_info: Type[params.Param] = params.Param, + force_type: Optional[params.ParamTypes] = None, + ignore_default: bool = False, +) -> ModelField: + default_value: Any = Undefined + had_schema = False + if not param.default == param.empty and ignore_default is False: + default_value = param.default + if isinstance(default_value, FieldInfo): + had_schema = True + field_info = default_value + default_value = field_info.default + if ( + isinstance(field_info, params.Param) + and getattr(field_info, "in_", None) is None + ): + field_info.in_ = default_field_info.in_ + if force_type: + field_info.in_ = force_type # type: ignore + else: + field_info = default_field_info(default=default_value) + required = True + if default_value is Required or ignore_default: + required = True + default_value = None + elif default_value is not Undefined: + required = False + annotation: Any = Any + if not param.annotation == param.empty: + annotation = param.annotation + annotation = get_annotation_from_field_info(annotation, field_info, param_name) + if not field_info.alias and getattr(field_info, "convert_underscores", None): + alias = param.name.replace("_", "-") + else: + alias = field_info.alias or param.name + field = create_response_field( + name=param.name, + type_=annotation, + default=default_value, + alias=alias, + required=required, + field_info=field_info, + ) + if not had_schema and not is_scalar_field(field=field): + field.field_info = params.Body(field_info.default) + if not had_schema and lenient_issubclass(field.type_, UploadFile): + field.field_info = params.File(field_info.default) + + return field + + +def add_param_to_fields(*, field: ModelField, dependant: Dependant) -> None: + field_info = cast(params.Param, field.field_info) + if field_info.in_ == params.ParamTypes.path: + dependant.path_params.append(field) + elif field_info.in_ == params.ParamTypes.query: + dependant.query_params.append(field) + elif field_info.in_ == params.ParamTypes.header: + dependant.header_params.append(field) + else: + assert ( + field_info.in_ == params.ParamTypes.cookie + ), f"non-body parameters must be in path, query, header 
or cookie: {field.name}" + dependant.cookie_params.append(field) + + +def is_coroutine_callable(call: Callable[..., Any]) -> bool: + if inspect.isroutine(call): + return inspect.iscoroutinefunction(call) + if inspect.isclass(call): + return False + call = getattr(call, "__call__", None) + return inspect.iscoroutinefunction(call) + + +def is_async_gen_callable(call: Callable[..., Any]) -> bool: + if inspect.isasyncgenfunction(call): + return True + call = getattr(call, "__call__", None) + return inspect.isasyncgenfunction(call) + + +def is_gen_callable(call: Callable[..., Any]) -> bool: + if inspect.isgeneratorfunction(call): + return True + call = getattr(call, "__call__", None) + return inspect.isgeneratorfunction(call) + + +async def solve_generator( + *, call: Callable[..., Any], stack: AsyncExitStack, sub_values: Dict[str, Any] +) -> Any: + if is_gen_callable(call): + cm = contextmanager_in_threadpool(contextmanager(call)(**sub_values)) + elif is_async_gen_callable(call): + cm = asynccontextmanager(call)(**sub_values) + return await stack.enter_async_context(cm) + + +async def solve_dependencies( + *, + request: Union[Request, WebSocket], + dependant: Dependant, + body: Optional[Union[Dict[str, Any], FormData]] = None, + background_tasks: Optional[BackgroundTasks] = None, + response: Optional[Response] = None, + dependency_overrides_provider: Optional[Any] = None, + dependency_cache: Optional[Dict[Tuple[Callable[..., Any], Tuple[str]], Any]] = None, +) -> Tuple[ + Dict[str, Any], + List[ErrorWrapper], + Optional[BackgroundTasks], + Response, + Dict[Tuple[Callable[..., Any], Tuple[str]], Any], +]: + values: Dict[str, Any] = {} + errors: List[ErrorWrapper] = [] + if response is None: + response = Response() + del response.headers["content-length"] + response.status_code = None # type: ignore + dependency_cache = dependency_cache or {} + sub_dependant: Dependant + for sub_dependant in dependant.dependencies: + sub_dependant.call = cast(Callable[..., Any], sub_dependant.call) + sub_dependant.cache_key = cast( + Tuple[Callable[..., Any], Tuple[str]], sub_dependant.cache_key + ) + call = sub_dependant.call + use_sub_dependant = sub_dependant + if ( + dependency_overrides_provider + and dependency_overrides_provider.dependency_overrides + ): + original_call = sub_dependant.call + call = getattr( + dependency_overrides_provider, "dependency_overrides", {} + ).get(original_call, original_call) + use_path: str = sub_dependant.path # type: ignore + use_sub_dependant = get_dependant( + path=use_path, + call=call, + name=sub_dependant.name, + security_scopes=sub_dependant.security_scopes, + ) + + solved_result = await solve_dependencies( + request=request, + dependant=use_sub_dependant, + body=body, + background_tasks=background_tasks, + response=response, + dependency_overrides_provider=dependency_overrides_provider, + dependency_cache=dependency_cache, + ) + ( + sub_values, + sub_errors, + background_tasks, + _, # the subdependency returns the same response we have + sub_dependency_cache, + ) = solved_result + dependency_cache.update(sub_dependency_cache) + if sub_errors: + errors.extend(sub_errors) + continue + if sub_dependant.use_cache and sub_dependant.cache_key in dependency_cache: + solved = dependency_cache[sub_dependant.cache_key] + elif is_gen_callable(call) or is_async_gen_callable(call): + stack = request.scope.get("fastapi_astack") + assert isinstance(stack, AsyncExitStack) + solved = await solve_generator( + call=call, stack=stack, sub_values=sub_values + ) + elif 
is_coroutine_callable(call): + solved = await call(**sub_values) + else: + solved = await run_in_threadpool(call, **sub_values) + if sub_dependant.name is not None: + values[sub_dependant.name] = solved + if sub_dependant.cache_key not in dependency_cache: + dependency_cache[sub_dependant.cache_key] = solved + path_values, path_errors = request_params_to_args( + dependant.path_params, request.path_params + ) + query_values, query_errors = request_params_to_args( + dependant.query_params, request.query_params + ) + header_values, header_errors = request_params_to_args( + dependant.header_params, request.headers + ) + cookie_values, cookie_errors = request_params_to_args( + dependant.cookie_params, request.cookies + ) + values.update(path_values) + values.update(query_values) + values.update(header_values) + values.update(cookie_values) + errors += path_errors + query_errors + header_errors + cookie_errors + if dependant.body_params: + ( + body_values, + body_errors, + ) = await request_body_to_args( # body_params checked above + required_params=dependant.body_params, received_body=body + ) + values.update(body_values) + errors.extend(body_errors) + if dependant.http_connection_param_name: + values[dependant.http_connection_param_name] = request + if dependant.request_param_name and isinstance(request, Request): + values[dependant.request_param_name] = request + elif dependant.websocket_param_name and isinstance(request, WebSocket): + values[dependant.websocket_param_name] = request + if dependant.background_tasks_param_name: + if background_tasks is None: + background_tasks = BackgroundTasks() + values[dependant.background_tasks_param_name] = background_tasks + if dependant.response_param_name: + values[dependant.response_param_name] = response + if dependant.security_scopes_param_name: + values[dependant.security_scopes_param_name] = SecurityScopes( + scopes=dependant.security_scopes + ) + return values, errors, background_tasks, response, dependency_cache + + +def request_params_to_args( + required_params: Sequence[ModelField], + received_params: Union[Mapping[str, Any], QueryParams, Headers], +) -> Tuple[Dict[str, Any], List[ErrorWrapper]]: + values = {} + errors = [] + for field in required_params: + if is_scalar_sequence_field(field) and isinstance( + received_params, (QueryParams, Headers) + ): + value = received_params.getlist(field.alias) or field.default + else: + value = received_params.get(field.alias) + field_info = field.field_info + assert isinstance( + field_info, params.Param + ), "Params must be subclasses of Param" + if value is None: + if field.required: + errors.append( + ErrorWrapper( + MissingError(), loc=(field_info.in_.value, field.alias) + ) + ) + else: + values[field.name] = deepcopy(field.default) + continue + v_, errors_ = field.validate( + value, values, loc=(field_info.in_.value, field.alias) + ) + if isinstance(errors_, ErrorWrapper): + errors.append(errors_) + elif isinstance(errors_, list): + errors.extend(errors_) + else: + values[field.name] = v_ + return values, errors + + +async def request_body_to_args( + required_params: List[ModelField], + received_body: Optional[Union[Dict[str, Any], FormData]], +) -> Tuple[Dict[str, Any], List[ErrorWrapper]]: + values = {} + errors = [] + if required_params: + field = required_params[0] + field_info = field.field_info + embed = getattr(field_info, "embed", None) + field_alias_omitted = len(required_params) == 1 and not embed + if field_alias_omitted: + received_body = {field.alias: received_body} + + for field in 
required_params: + loc: Tuple[str, ...] + if field_alias_omitted: + loc = ("body",) + else: + loc = ("body", field.alias) + + value: Optional[Any] = None + if received_body is not None: + if ( + field.shape in sequence_shapes or field.type_ in sequence_types + ) and isinstance(received_body, FormData): + value = received_body.getlist(field.alias) + else: + try: + value = received_body.get(field.alias) + except AttributeError: + errors.append(get_missing_field_error(loc)) + continue + if ( + value is None + or (isinstance(field_info, params.Form) and value == "") + or ( + isinstance(field_info, params.Form) + and field.shape in sequence_shapes + and len(value) == 0 + ) + ): + if field.required: + errors.append(get_missing_field_error(loc)) + else: + values[field.name] = deepcopy(field.default) + continue + if ( + isinstance(field_info, params.File) + and lenient_issubclass(field.type_, bytes) + and isinstance(value, UploadFile) + ): + value = await value.read() + elif ( + field.shape in sequence_shapes + and isinstance(field_info, params.File) + and lenient_issubclass(field.type_, bytes) + and isinstance(value, sequence_types) + ): + results: List[Union[bytes, str]] = [] + + async def process_fn( + fn: Callable[[], Coroutine[Any, Any, Any]] + ) -> None: + result = await fn() + results.append(result) + + async with anyio.create_task_group() as tg: + for sub_value in value: + tg.start_soon(process_fn, sub_value.read) + value = sequence_shape_to_type[field.shape](results) + + v_, errors_ = field.validate(value, values, loc=loc) + + if isinstance(errors_, ErrorWrapper): + errors.append(errors_) + elif isinstance(errors_, list): + errors.extend(errors_) + else: + values[field.name] = v_ + return values, errors + + +def get_missing_field_error(loc: Tuple[str, ...]) -> ErrorWrapper: + missing_field_error = ErrorWrapper(MissingError(), loc=loc) + return missing_field_error + + +def get_body_field(*, dependant: Dependant, name: str) -> Optional[ModelField]: + flat_dependant = get_flat_dependant(dependant) + if not flat_dependant.body_params: + return None + first_param = flat_dependant.body_params[0] + field_info = first_param.field_info + embed = getattr(field_info, "embed", None) + body_param_names_set = {param.name for param in flat_dependant.body_params} + if len(body_param_names_set) == 1 and not embed: + check_file_field(first_param) + return first_param + # If one field requires to embed, all have to be embedded + # in case a sub-dependency is evaluated with a single unique body field + # That is combined (embedded) with other body fields + for param in flat_dependant.body_params: + setattr(param.field_info, "embed", True) + model_name = "Body_" + name + BodyModel: Type[BaseModel] = create_model(model_name) + for f in flat_dependant.body_params: + BodyModel.__fields__[f.name] = f + required = any(True for f in flat_dependant.body_params if f.required) + + BodyFieldInfo_kwargs: Dict[str, Any] = dict(default=None) + if any(isinstance(f.field_info, params.File) for f in flat_dependant.body_params): + BodyFieldInfo: Type[params.Body] = params.File + elif any(isinstance(f.field_info, params.Form) for f in flat_dependant.body_params): + BodyFieldInfo = params.Form + else: + BodyFieldInfo = params.Body + + body_param_media_types = [ + getattr(f.field_info, "media_type") + for f in flat_dependant.body_params + if isinstance(f.field_info, params.Body) + ] + if len(set(body_param_media_types)) == 1: + BodyFieldInfo_kwargs["media_type"] = body_param_media_types[0] + final_field = create_response_field( 
+ name="body", + type_=BodyModel, + required=required, + alias="body", + field_info=BodyFieldInfo(**BodyFieldInfo_kwargs), + ) + check_file_field(final_field) + return final_field diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/encoders.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/encoders.py new file mode 100644 index 00000000..f64e4b86 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/encoders.py @@ -0,0 +1,167 @@ +import dataclasses +from collections import defaultdict +from enum import Enum +from pathlib import PurePath +from types import GeneratorType +from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union + +from pydantic import BaseModel +from pydantic.json import ENCODERS_BY_TYPE + +SetIntStr = Set[Union[int, str]] +DictIntStrAny = Dict[Union[int, str], Any] + + +def generate_encoders_by_class_tuples( + type_encoder_map: Dict[Any, Callable[[Any], Any]] +) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]: + encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict( + tuple + ) + for type_, encoder in type_encoder_map.items(): + encoders_by_class_tuples[encoder] += (type_,) + return encoders_by_class_tuples + + +encoders_by_class_tuples = generate_encoders_by_class_tuples(ENCODERS_BY_TYPE) + + +def jsonable_encoder( + obj: Any, + include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + by_alias: bool = True, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None, + sqlalchemy_safe: bool = True, +) -> Any: + custom_encoder = custom_encoder or {} + if custom_encoder: + if type(obj) in custom_encoder: + return custom_encoder[type(obj)](obj) + else: + for encoder_type, encoder_instance in custom_encoder.items(): + if isinstance(obj, encoder_type): + return encoder_instance(obj) + if include is not None and not isinstance(include, (set, dict)): + include = set(include) + if exclude is not None and not isinstance(exclude, (set, dict)): + exclude = set(exclude) + if isinstance(obj, BaseModel): + encoder = getattr(obj.__config__, "json_encoders", {}) + if custom_encoder: + encoder.update(custom_encoder) + obj_dict = obj.dict( + include=include, # type: ignore # in Pydantic + exclude=exclude, # type: ignore # in Pydantic + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_none=exclude_none, + exclude_defaults=exclude_defaults, + ) + if "__root__" in obj_dict: + obj_dict = obj_dict["__root__"] + return jsonable_encoder( + obj_dict, + exclude_none=exclude_none, + exclude_defaults=exclude_defaults, + custom_encoder=encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + if dataclasses.is_dataclass(obj): + obj_dict = dataclasses.asdict(obj) + return jsonable_encoder( + obj_dict, + exclude_none=exclude_none, + exclude_defaults=exclude_defaults, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + if isinstance(obj, Enum): + return obj.value + if isinstance(obj, PurePath): + return str(obj) + if isinstance(obj, (str, int, float, type(None))): + return obj + if isinstance(obj, dict): + encoded_dict = {} + allowed_keys = set(obj.keys()) + if include is not None: + allowed_keys &= set(include) + if exclude is not None: + allowed_keys -= set(exclude) + for key, value in obj.items(): + if ( + ( + not sqlalchemy_safe + or (not isinstance(key, str)) + or (not 
key.startswith("_sa")) + ) + and (value is not None or not exclude_none) + and key in allowed_keys + ): + encoded_key = jsonable_encoder( + key, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + encoded_value = jsonable_encoder( + value, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + encoded_dict[encoded_key] = encoded_value + return encoded_dict + if isinstance(obj, (list, set, frozenset, GeneratorType, tuple)): + encoded_list = [] + for item in obj: + encoded_list.append( + jsonable_encoder( + item, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + ) + return encoded_list + + if type(obj) in ENCODERS_BY_TYPE: + return ENCODERS_BY_TYPE[type(obj)](obj) + for encoder, classes_tuple in encoders_by_class_tuples.items(): + if isinstance(obj, classes_tuple): + return encoder(obj) + + try: + data = dict(obj) + except Exception as e: + errors: List[Exception] = [] + errors.append(e) + try: + data = vars(obj) + except Exception as e: + errors.append(e) + raise ValueError(errors) + return jsonable_encoder( + data, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/exception_handlers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/exception_handlers.py new file mode 100644 index 00000000..2b286d71 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/exception_handlers.py @@ -0,0 +1,25 @@ +from fastapi.encoders import jsonable_encoder +from fastapi.exceptions import RequestValidationError +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.responses import JSONResponse +from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY + + +async def http_exception_handler(request: Request, exc: HTTPException) -> JSONResponse: + headers = getattr(exc, "headers", None) + if headers: + return JSONResponse( + {"detail": exc.detail}, status_code=exc.status_code, headers=headers + ) + else: + return JSONResponse({"detail": exc.detail}, status_code=exc.status_code) + + +async def request_validation_exception_handler( + request: Request, exc: RequestValidationError +) -> JSONResponse: + return JSONResponse( + status_code=HTTP_422_UNPROCESSABLE_ENTITY, + content={"detail": jsonable_encoder(exc.errors())}, + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/exceptions.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/exceptions.py new file mode 100644 index 00000000..0f50acc6 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/exceptions.py @@ -0,0 +1,36 @@ +from typing import Any, Dict, Optional, Sequence, Type + +from pydantic import BaseModel, ValidationError, create_model +from pydantic.error_wrappers import ErrorList +from starlette.exceptions import HTTPException as StarletteHTTPException + + +class HTTPException(StarletteHTTPException): + def __init__( + self, + status_code: int, + 
detail: Any = None, + headers: Optional[Dict[str, Any]] = None, + ) -> None: + super().__init__(status_code=status_code, detail=detail, headers=headers) + + +RequestErrorModel: Type[BaseModel] = create_model("Request") +WebSocketErrorModel: Type[BaseModel] = create_model("WebSocket") + + +class FastAPIError(RuntimeError): + """ + A generic, FastAPI-specific error. + """ + + +class RequestValidationError(ValidationError): + def __init__(self, errors: Sequence[ErrorList], *, body: Any = None) -> None: + self.body = body + super().__init__(errors, RequestErrorModel) + + +class WebSocketRequestValidationError(ValidationError): + def __init__(self, errors: Sequence[ErrorList]) -> None: + super().__init__(errors, WebSocketErrorModel) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/logger.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/logger.py new file mode 100644 index 00000000..5b2c4ad5 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/logger.py @@ -0,0 +1,3 @@ +import logging + +logger = logging.getLogger("fastapi") diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/__init__.py new file mode 100644 index 00000000..620296d5 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/__init__.py @@ -0,0 +1 @@ +from starlette.middleware import Middleware as Middleware diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/asyncexitstack.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/asyncexitstack.py new file mode 100644 index 00000000..503a68ac --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/asyncexitstack.py @@ -0,0 +1,28 @@ +from typing import Optional + +from fastapi.concurrency import AsyncExitStack +from starlette.types import ASGIApp, Receive, Scope, Send + + +class AsyncExitStackMiddleware: + def __init__(self, app: ASGIApp, context_name: str = "fastapi_astack") -> None: + self.app = app + self.context_name = context_name + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if AsyncExitStack: + dependency_exception: Optional[Exception] = None + async with AsyncExitStack() as stack: + scope[self.context_name] = stack + try: + await self.app(scope, receive, send) + except Exception as e: + dependency_exception = e + raise e + if dependency_exception: + # This exception was possibly handled by the dependency but it should + # still bubble up so that the ServerErrorMiddleware can return a 500 + # or the ExceptionMiddleware can catch and handle any other exceptions + raise dependency_exception + else: + await self.app(scope, receive, send) # pragma: no cover diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/cors.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/cors.py new file mode 100644 index 00000000..8dfaad0d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/cors.py @@ -0,0 +1 @@ +from starlette.middleware.cors import CORSMiddleware as CORSMiddleware # noqa diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/gzip.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/gzip.py new file mode 100644 index 
00000000..bbeb2cc7 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/gzip.py @@ -0,0 +1 @@ +from starlette.middleware.gzip import GZipMiddleware as GZipMiddleware # noqa diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/httpsredirect.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/httpsredirect.py new file mode 100644 index 00000000..b7a3d8e0 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/httpsredirect.py @@ -0,0 +1,3 @@ +from starlette.middleware.httpsredirect import ( # noqa + HTTPSRedirectMiddleware as HTTPSRedirectMiddleware, +) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/trustedhost.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/trustedhost.py new file mode 100644 index 00000000..08d7e035 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/trustedhost.py @@ -0,0 +1,3 @@ +from starlette.middleware.trustedhost import ( # noqa + TrustedHostMiddleware as TrustedHostMiddleware, +) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/wsgi.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/wsgi.py new file mode 100644 index 00000000..c4c6a797 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/middleware/wsgi.py @@ -0,0 +1 @@ +from starlette.middleware.wsgi import WSGIMiddleware as WSGIMiddleware # noqa diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/openapi/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/openapi/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/openapi/constants.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/openapi/constants.py new file mode 100644 index 00000000..1897ad75 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/openapi/constants.py @@ -0,0 +1,2 @@ +METHODS_WITH_BODY = {"GET", "HEAD", "POST", "PUT", "DELETE", "PATCH"} +REF_PREFIX = "#/components/schemas/" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/openapi/docs.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/openapi/docs.py new file mode 100644 index 00000000..bf335118 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/openapi/docs.py @@ -0,0 +1,203 @@ +import json +from typing import Any, Dict, Optional + +from fastapi.encoders import jsonable_encoder +from starlette.responses import HTMLResponse + +swagger_ui_default_parameters = { + "dom_id": "#swagger-ui", + "layout": "BaseLayout", + "deepLinking": True, + "showExtensions": True, + "showCommonExtensions": True, +} + + +def get_swagger_ui_html( + *, + openapi_url: str, + title: str, + swagger_js_url: str = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@4/swagger-ui-bundle.js", + swagger_css_url: str = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@4/swagger-ui.css", + swagger_favicon_url: str = "https://fastapi.tiangolo.com/img/favicon.png", + oauth2_redirect_url: Optional[str] = None, + init_oauth: Optional[Dict[str, Any]] = None, + swagger_ui_parameters: Optional[Dict[str, Any]] = None, +) -> HTMLResponse: + current_swagger_ui_parameters = 
swagger_ui_default_parameters.copy()
+    if swagger_ui_parameters:
+        current_swagger_ui_parameters.update(swagger_ui_parameters)
+
+    html = f"""
+    <!DOCTYPE html>
+    <html>
+    <head>
+    <link type="text/css" rel="stylesheet" href="{swagger_css_url}">
+    <link rel="shortcut icon" href="{swagger_favicon_url}">
+    <title>{title}</title>
+    </head>
+    <body>
+    <div id="swagger-ui">
+    </div>
+    <script src="{swagger_js_url}"></script>
+    <!-- `SwaggerUIBundle` is now available on the page -->
+    <script>
+    const ui = SwaggerUIBundle({{
+        url: '{openapi_url}',
+    """
+
+    for key, value in current_swagger_ui_parameters.items():
+        html += f"{json.dumps(key)}: {json.dumps(jsonable_encoder(value))},\n"
+
+    if oauth2_redirect_url:
+        html += f"oauth2RedirectUrl: window.location.origin + '{oauth2_redirect_url}',"
+
+    html += """
+    presets: [
+        SwaggerUIBundle.presets.apis,
+        SwaggerUIBundle.SwaggerUIStandalonePreset
+        ],
+    })"""
+
+    if init_oauth:
+        html += f"""
+    ui.initOAuth({json.dumps(jsonable_encoder(init_oauth))})
+    """
+
+    html += """
+    </script>
+    </body>
+    </html>
+    """
+    return HTMLResponse(html)
+
+
+def get_redoc_html(
+    *,
+    openapi_url: str,
+    title: str,
+    redoc_js_url: str = "https://cdn.jsdelivr.net/npm/redoc@next/bundles/redoc.standalone.js",
+    redoc_favicon_url: str = "https://fastapi.tiangolo.com/img/favicon.png",
+    with_google_fonts: bool = True,
+) -> HTMLResponse:
+    html = f"""
+    <!DOCTYPE html>
+    <html>
+    <head>
+    <title>{title}</title>
+    <!-- needed for adaptive design -->
+    <meta charset="utf-8"/>
+    <meta name="viewport" content="width=device-width, initial-scale=1">
+    <link rel="shortcut icon" href="{redoc_favicon_url}">
+    """
+    if with_google_fonts:
+        html += """
+    <link href="https://fonts.googleapis.com/css?family=Montserrat:300,400,700|Roboto:300,400,700" rel="stylesheet">
+    """
+    html += f"""
+    <!--
+    ReDoc doesn't change outer page styles
+    -->
+    <style>
+      body {{
+        margin: 0;
+        padding: 0;
+      }}
+    </style>
+    </head>
+    <body>
+    <noscript>
+        ReDoc requires Javascript to function. Please enable it to browse the documentation.
+    </noscript>
+    <redoc spec-url="{openapi_url}"></redoc>
+    <script src="{redoc_js_url}"> </script>
+    </body>
+    </html>
+    """
+    return HTMLResponse(html)
+
+
+def get_swagger_ui_oauth2_redirect_html() -> HTMLResponse:
+    # copied from https://github.com/swagger-api/swagger-ui/blob/v4.14.0/dist/oauth2-redirect.html
+    html = """
+    <!doctype html>
+    <html lang="en-US">
+    <head>
+        <title>Swagger UI: OAuth2 Redirect</title>
+    </head>
+    <body>
+    <script>
+        'use strict';
+        function run () {
+            var oauth2 = window.opener.swaggerUIRedirectOauth2;
+            var sentState = oauth2.state;
+            var redirectUrl = oauth2.redirectUrl;
+            var isValid, qp, arr;
+
+            if (/code|token|error/.test(window.location.hash)) {
+                qp = window.location.hash.substring(1).replace('?', '&');
+            } else {
+                qp = location.search.substring(1);
+            }
+
+            arr = qp.split("&");
+            arr.forEach(function (v,i,_arr) { _arr[i] = '"' + v.replace('=', '":"') + '"';});
+            qp = qp ? JSON.parse('{' + arr.join() + '}',
+                    function (key, value) {
+                        return key === "" ? value : decodeURIComponent(value);
+                    }
+            ) : {};
+
+            isValid = qp.state === sentState;
+
+            if ((
+              oauth2.auth.schema.get("flow") === "accessCode" ||
+              oauth2.auth.schema.get("flow") === "authorizationCode" ||
+              oauth2.auth.schema.get("flow") === "authorization_code"
+            ) && !oauth2.auth.code) {
+                if (!isValid) {
+                    oauth2.errCb({
+                        authId: oauth2.auth.name,
+                        source: "auth",
+                        level: "warning",
+                        message: "Authorization may be unsafe, passed state was changed in server. The passed state wasn't returned from auth server."
+                    });
+                }
+
+                if (qp.code) {
+                    delete oauth2.state;
+                    oauth2.auth.code = qp.code;
+                    oauth2.callback({auth: oauth2.auth, redirectUrl: redirectUrl});
+                } else {
+                    let oauthErrorMsg;
+                    if (qp.error) {
+                        oauthErrorMsg = "["+qp.error+"]: " +
+                            (qp.error_description ? qp.error_description+ ". " : "no accessCode received from the server. ") +
+                            (qp.error_uri ? "More info: "+qp.error_uri : "");
+                    }
+
+                    oauth2.errCb({
+                        authId: oauth2.auth.name,
+                        source: "auth",
+                        level: "error",
+                        message: oauthErrorMsg || "[Authorization failed]: no accessCode received from the server."
+                    });
+                }
+            } else {
+                oauth2.callback({auth: oauth2.auth, token: qp, isValid: isValid, redirectUrl: redirectUrl});
+            }
+            window.close();
+        }
+
+        if (document.readyState !== 'loading') {
+            run();
+        } else {
+            document.addEventListener('DOMContentLoaded', function () {
+                run();
+            });
+        }
+    </script>
+    </body>
+    </html>
+    """
+    return HTMLResponse(content=html)
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/openapi/models.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/openapi/models.py
new file mode 100644
index 00000000..35aa1672
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/openapi/models.py
@@ -0,0 +1,406 @@
+from enum import Enum
+from typing import Any, Callable, Dict, Iterable, List, Optional, Union
+
+from fastapi.logger import logger
+from pydantic import AnyUrl, BaseModel, Field
+
+try:
+    import email_validator  # type: ignore
+
+    assert email_validator  # make autoflake ignore the unused import
+    from pydantic import EmailStr
+except ImportError:  # pragma: no cover
+
+    class EmailStr(str):  # type: ignore
+        @classmethod
+        def __get_validators__(cls) -> Iterable[Callable[..., Any]]:
+            yield cls.validate
+
+        @classmethod
+        def validate(cls, v: Any) -> str:
+            logger.warning(
+                "email-validator not installed, email fields will be treated as str.\n"
+                "To install, run: pip install email-validator"
+            )
+            return str(v)
+
+
+class Contact(BaseModel):
+    name: Optional[str] = None
+    url: Optional[AnyUrl] = None
+    email: Optional[EmailStr] = None
+
+    class Config:
+        extra = "allow"
+
+
+class License(BaseModel):
+    name: str
+    url: Optional[AnyUrl] = None
+
+    class Config:
+        extra = "allow"
+
+
+class Info(BaseModel):
+    title: str
+    description: Optional[str] = None
+    termsOfService: Optional[str] = None
+    contact: Optional[Contact] = None
+    license: Optional[License] = None
+    version: str
+
+    class Config:
+        extra = "allow"
+
+
+class ServerVariable(BaseModel):
+    enum: Optional[List[str]] = None
+    default: str
+    description: Optional[str] = None
+
+    class Config:
+        extra = "allow"
+
+
+class Server(BaseModel):
+    url: Union[AnyUrl, str]
+    description: Optional[str] = None
+    variables: Optional[Dict[str, ServerVariable]] = None
+
+    class Config:
+        extra = "allow"
+
+
+class Reference(BaseModel):
+    ref: str = Field(alias="$ref")
+
+
+class Discriminator(BaseModel):
+    propertyName: str
+    mapping: Optional[Dict[str, str]] = None
+
+
+class XML(BaseModel):
+    name: Optional[str] = None
+    namespace: Optional[str] = None
+    prefix: Optional[str] = None
+    attribute: Optional[bool] = None
+    wrapped: Optional[bool] = None
+
+    class Config:
+        extra = "allow"
+
+
+class ExternalDocumentation(BaseModel):
+    description: Optional[str] = None
+    url: AnyUrl
+
+    class Config:
+        extra = "allow"
+
+
+class Schema(BaseModel):
+    ref: Optional[str] = Field(default=None, alias="$ref")
+    title: Optional[str] = None
+    multipleOf: Optional[float] = None
+    maximum: Optional[float] = None
+    exclusiveMaximum: Optional[float] = None
+    minimum: Optional[float] = None
+    exclusiveMinimum: Optional[float] = None
+    maxLength: Optional[int] =
Field(default=None, gte=0) + minLength: Optional[int] = Field(default=None, gte=0) + pattern: Optional[str] = None + maxItems: Optional[int] = Field(default=None, gte=0) + minItems: Optional[int] = Field(default=None, gte=0) + uniqueItems: Optional[bool] = None + maxProperties: Optional[int] = Field(default=None, gte=0) + minProperties: Optional[int] = Field(default=None, gte=0) + required: Optional[List[str]] = None + enum: Optional[List[Any]] = None + type: Optional[str] = None + allOf: Optional[List["Schema"]] = None + oneOf: Optional[List["Schema"]] = None + anyOf: Optional[List["Schema"]] = None + not_: Optional["Schema"] = Field(default=None, alias="not") + items: Optional[Union["Schema", List["Schema"]]] = None + properties: Optional[Dict[str, "Schema"]] = None + additionalProperties: Optional[Union["Schema", Reference, bool]] = None + description: Optional[str] = None + format: Optional[str] = None + default: Optional[Any] = None + nullable: Optional[bool] = None + discriminator: Optional[Discriminator] = None + readOnly: Optional[bool] = None + writeOnly: Optional[bool] = None + xml: Optional[XML] = None + externalDocs: Optional[ExternalDocumentation] = None + example: Optional[Any] = None + deprecated: Optional[bool] = None + + class Config: + extra: str = "allow" + + +class Example(BaseModel): + summary: Optional[str] = None + description: Optional[str] = None + value: Optional[Any] = None + externalValue: Optional[AnyUrl] = None + + class Config: + extra = "allow" + + +class ParameterInType(Enum): + query = "query" + header = "header" + path = "path" + cookie = "cookie" + + +class Encoding(BaseModel): + contentType: Optional[str] = None + headers: Optional[Dict[str, Union["Header", Reference]]] = None + style: Optional[str] = None + explode: Optional[bool] = None + allowReserved: Optional[bool] = None + + class Config: + extra = "allow" + + +class MediaType(BaseModel): + schema_: Optional[Union[Schema, Reference]] = Field(default=None, alias="schema") + example: Optional[Any] = None + examples: Optional[Dict[str, Union[Example, Reference]]] = None + encoding: Optional[Dict[str, Encoding]] = None + + class Config: + extra = "allow" + + +class ParameterBase(BaseModel): + description: Optional[str] = None + required: Optional[bool] = None + deprecated: Optional[bool] = None + # Serialization rules for simple scenarios + style: Optional[str] = None + explode: Optional[bool] = None + allowReserved: Optional[bool] = None + schema_: Optional[Union[Schema, Reference]] = Field(default=None, alias="schema") + example: Optional[Any] = None + examples: Optional[Dict[str, Union[Example, Reference]]] = None + # Serialization rules for more complex scenarios + content: Optional[Dict[str, MediaType]] = None + + class Config: + extra = "allow" + + +class Parameter(ParameterBase): + name: str + in_: ParameterInType = Field(alias="in") + + +class Header(ParameterBase): + pass + + +class RequestBody(BaseModel): + description: Optional[str] = None + content: Dict[str, MediaType] + required: Optional[bool] = None + + class Config: + extra = "allow" + + +class Link(BaseModel): + operationRef: Optional[str] = None + operationId: Optional[str] = None + parameters: Optional[Dict[str, Union[Any, str]]] = None + requestBody: Optional[Union[Any, str]] = None + description: Optional[str] = None + server: Optional[Server] = None + + class Config: + extra = "allow" + + +class Response(BaseModel): + description: str + headers: Optional[Dict[str, Union[Header, Reference]]] = None + content: Optional[Dict[str, 
MediaType]] = None + links: Optional[Dict[str, Union[Link, Reference]]] = None + + class Config: + extra = "allow" + + +class Operation(BaseModel): + tags: Optional[List[str]] = None + summary: Optional[str] = None + description: Optional[str] = None + externalDocs: Optional[ExternalDocumentation] = None + operationId: Optional[str] = None + parameters: Optional[List[Union[Parameter, Reference]]] = None + requestBody: Optional[Union[RequestBody, Reference]] = None + # Using Any for Specification Extensions + responses: Dict[str, Union[Response, Any]] + callbacks: Optional[Dict[str, Union[Dict[str, "PathItem"], Reference]]] = None + deprecated: Optional[bool] = None + security: Optional[List[Dict[str, List[str]]]] = None + servers: Optional[List[Server]] = None + + class Config: + extra = "allow" + + +class PathItem(BaseModel): + ref: Optional[str] = Field(default=None, alias="$ref") + summary: Optional[str] = None + description: Optional[str] = None + get: Optional[Operation] = None + put: Optional[Operation] = None + post: Optional[Operation] = None + delete: Optional[Operation] = None + options: Optional[Operation] = None + head: Optional[Operation] = None + patch: Optional[Operation] = None + trace: Optional[Operation] = None + servers: Optional[List[Server]] = None + parameters: Optional[List[Union[Parameter, Reference]]] = None + + class Config: + extra = "allow" + + +class SecuritySchemeType(Enum): + apiKey = "apiKey" + http = "http" + oauth2 = "oauth2" + openIdConnect = "openIdConnect" + + +class SecurityBase(BaseModel): + type_: SecuritySchemeType = Field(alias="type") + description: Optional[str] = None + + class Config: + extra = "allow" + + +class APIKeyIn(Enum): + query = "query" + header = "header" + cookie = "cookie" + + +class APIKey(SecurityBase): + type_ = Field(SecuritySchemeType.apiKey, alias="type") + in_: APIKeyIn = Field(alias="in") + name: str + + +class HTTPBase(SecurityBase): + type_ = Field(SecuritySchemeType.http, alias="type") + scheme: str + + +class HTTPBearer(HTTPBase): + scheme = "bearer" + bearerFormat: Optional[str] = None + + +class OAuthFlow(BaseModel): + refreshUrl: Optional[str] = None + scopes: Dict[str, str] = {} + + class Config: + extra = "allow" + + +class OAuthFlowImplicit(OAuthFlow): + authorizationUrl: str + + +class OAuthFlowPassword(OAuthFlow): + tokenUrl: str + + +class OAuthFlowClientCredentials(OAuthFlow): + tokenUrl: str + + +class OAuthFlowAuthorizationCode(OAuthFlow): + authorizationUrl: str + tokenUrl: str + + +class OAuthFlows(BaseModel): + implicit: Optional[OAuthFlowImplicit] = None + password: Optional[OAuthFlowPassword] = None + clientCredentials: Optional[OAuthFlowClientCredentials] = None + authorizationCode: Optional[OAuthFlowAuthorizationCode] = None + + class Config: + extra = "allow" + + +class OAuth2(SecurityBase): + type_ = Field(SecuritySchemeType.oauth2, alias="type") + flows: OAuthFlows + + +class OpenIdConnect(SecurityBase): + type_ = Field(SecuritySchemeType.openIdConnect, alias="type") + openIdConnectUrl: str + + +SecurityScheme = Union[APIKey, HTTPBase, OAuth2, OpenIdConnect, HTTPBearer] + + +class Components(BaseModel): + schemas: Optional[Dict[str, Union[Schema, Reference]]] = None + responses: Optional[Dict[str, Union[Response, Reference]]] = None + parameters: Optional[Dict[str, Union[Parameter, Reference]]] = None + examples: Optional[Dict[str, Union[Example, Reference]]] = None + requestBodies: Optional[Dict[str, Union[RequestBody, Reference]]] = None + headers: Optional[Dict[str, Union[Header, Reference]]] = 
None + securitySchemes: Optional[Dict[str, Union[SecurityScheme, Reference]]] = None + links: Optional[Dict[str, Union[Link, Reference]]] = None + # Using Any for Specification Extensions + callbacks: Optional[Dict[str, Union[Dict[str, PathItem], Reference, Any]]] = None + + class Config: + extra = "allow" + + +class Tag(BaseModel): + name: str + description: Optional[str] = None + externalDocs: Optional[ExternalDocumentation] = None + + class Config: + extra = "allow" + + +class OpenAPI(BaseModel): + openapi: str + info: Info + servers: Optional[List[Server]] = None + # Using Any for Specification Extensions + paths: Dict[str, Union[PathItem, Any]] + components: Optional[Components] = None + security: Optional[List[Dict[str, List[str]]]] = None + tags: Optional[List[Tag]] = None + externalDocs: Optional[ExternalDocumentation] = None + + class Config: + extra = "allow" + + +Schema.update_forward_refs() +Operation.update_forward_refs() +Encoding.update_forward_refs() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/openapi/utils.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/openapi/utils.py new file mode 100644 index 00000000..4d5741f3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/openapi/utils.py @@ -0,0 +1,441 @@ +import http.client +import inspect +import warnings +from enum import Enum +from typing import Any, Dict, List, Optional, Sequence, Set, Tuple, Type, Union, cast + +from fastapi import routing +from fastapi.datastructures import DefaultPlaceholder +from fastapi.dependencies.models import Dependant +from fastapi.dependencies.utils import get_flat_dependant, get_flat_params +from fastapi.encoders import jsonable_encoder +from fastapi.openapi.constants import METHODS_WITH_BODY, REF_PREFIX +from fastapi.openapi.models import OpenAPI +from fastapi.params import Body, Param +from fastapi.responses import Response +from fastapi.utils import ( + deep_dict_update, + generate_operation_id_for_path, + get_model_definitions, + is_body_allowed_for_status_code, +) +from pydantic import BaseModel +from pydantic.fields import ModelField, Undefined +from pydantic.schema import ( + field_schema, + get_flat_models_from_fields, + get_model_name_map, +) +from pydantic.utils import lenient_issubclass +from starlette.responses import JSONResponse +from starlette.routing import BaseRoute +from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY + +validation_error_definition = { + "title": "ValidationError", + "type": "object", + "properties": { + "loc": { + "title": "Location", + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "integer"}]}, + }, + "msg": {"title": "Message", "type": "string"}, + "type": {"title": "Error Type", "type": "string"}, + }, + "required": ["loc", "msg", "type"], +} + +validation_error_response_definition = { + "title": "HTTPValidationError", + "type": "object", + "properties": { + "detail": { + "title": "Detail", + "type": "array", + "items": {"$ref": REF_PREFIX + "ValidationError"}, + } + }, +} + +status_code_ranges: Dict[str, str] = { + "1XX": "Information", + "2XX": "Success", + "3XX": "Redirection", + "4XX": "Client Error", + "5XX": "Server Error", + "DEFAULT": "Default Response", +} + + +def get_openapi_security_definitions( + flat_dependant: Dependant, +) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]: + security_definitions = {} + operation_security = [] + for security_requirement in flat_dependant.security_requirements: + security_definition = 
jsonable_encoder( + security_requirement.security_scheme.model, + by_alias=True, + exclude_none=True, + ) + security_name = security_requirement.security_scheme.scheme_name + security_definitions[security_name] = security_definition + operation_security.append({security_name: security_requirement.scopes}) + return security_definitions, operation_security + + +def get_openapi_operation_parameters( + *, + all_route_params: Sequence[ModelField], + model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str], +) -> List[Dict[str, Any]]: + parameters = [] + for param in all_route_params: + field_info = param.field_info + field_info = cast(Param, field_info) + if not field_info.include_in_schema: + continue + parameter = { + "name": param.alias, + "in": field_info.in_.value, + "required": param.required, + "schema": field_schema( + param, model_name_map=model_name_map, ref_prefix=REF_PREFIX + )[0], + } + if field_info.description: + parameter["description"] = field_info.description + if field_info.examples: + parameter["examples"] = jsonable_encoder(field_info.examples) + elif field_info.example != Undefined: + parameter["example"] = jsonable_encoder(field_info.example) + if field_info.deprecated: + parameter["deprecated"] = field_info.deprecated + parameters.append(parameter) + return parameters + + +def get_openapi_operation_request_body( + *, + body_field: Optional[ModelField], + model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str], +) -> Optional[Dict[str, Any]]: + if not body_field: + return None + assert isinstance(body_field, ModelField) + body_schema, _, _ = field_schema( + body_field, model_name_map=model_name_map, ref_prefix=REF_PREFIX + ) + field_info = cast(Body, body_field.field_info) + request_media_type = field_info.media_type + required = body_field.required + request_body_oai: Dict[str, Any] = {} + if required: + request_body_oai["required"] = required + request_media_content: Dict[str, Any] = {"schema": body_schema} + if field_info.examples: + request_media_content["examples"] = jsonable_encoder(field_info.examples) + elif field_info.example != Undefined: + request_media_content["example"] = jsonable_encoder(field_info.example) + request_body_oai["content"] = {request_media_type: request_media_content} + return request_body_oai + + +def generate_operation_id( + *, route: routing.APIRoute, method: str +) -> str: # pragma: nocover + warnings.warn( + "fastapi.openapi.utils.generate_operation_id() was deprecated, " + "it is not used internally, and will be removed soon", + DeprecationWarning, + stacklevel=2, + ) + if route.operation_id: + return route.operation_id + path: str = route.path_format + return generate_operation_id_for_path(name=route.name, path=path, method=method) + + +def generate_operation_summary(*, route: routing.APIRoute, method: str) -> str: + if route.summary: + return route.summary + return route.name.replace("_", " ").title() + + +def get_openapi_operation_metadata( + *, route: routing.APIRoute, method: str, operation_ids: Set[str] +) -> Dict[str, Any]: + operation: Dict[str, Any] = {} + if route.tags: + operation["tags"] = route.tags + operation["summary"] = generate_operation_summary(route=route, method=method) + if route.description: + operation["description"] = route.description + operation_id = route.operation_id or route.unique_id + if operation_id in operation_ids: + message = ( + f"Duplicate Operation ID {operation_id} for function " + + f"{route.endpoint.__name__}" + ) + file_name = getattr(route.endpoint, "__globals__", {}).get("__file__") 
+ if file_name: + message += f" at {file_name}" + warnings.warn(message) + operation_ids.add(operation_id) + operation["operationId"] = operation_id + if route.deprecated: + operation["deprecated"] = route.deprecated + return operation + + +def get_openapi_path( + *, route: routing.APIRoute, model_name_map: Dict[type, str], operation_ids: Set[str] +) -> Tuple[Dict[str, Any], Dict[str, Any], Dict[str, Any]]: + path = {} + security_schemes: Dict[str, Any] = {} + definitions: Dict[str, Any] = {} + assert route.methods is not None, "Methods must be a list" + if isinstance(route.response_class, DefaultPlaceholder): + current_response_class: Type[Response] = route.response_class.value + else: + current_response_class = route.response_class + assert current_response_class, "A response class is needed to generate OpenAPI" + route_response_media_type: Optional[str] = current_response_class.media_type + if route.include_in_schema: + for method in route.methods: + operation = get_openapi_operation_metadata( + route=route, method=method, operation_ids=operation_ids + ) + parameters: List[Dict[str, Any]] = [] + flat_dependant = get_flat_dependant(route.dependant, skip_repeats=True) + security_definitions, operation_security = get_openapi_security_definitions( + flat_dependant=flat_dependant + ) + if operation_security: + operation.setdefault("security", []).extend(operation_security) + if security_definitions: + security_schemes.update(security_definitions) + all_route_params = get_flat_params(route.dependant) + operation_parameters = get_openapi_operation_parameters( + all_route_params=all_route_params, model_name_map=model_name_map + ) + parameters.extend(operation_parameters) + if parameters: + operation["parameters"] = list( + { + (param["in"], param["name"]): param for param in parameters + }.values() + ) + if method in METHODS_WITH_BODY: + request_body_oai = get_openapi_operation_request_body( + body_field=route.body_field, model_name_map=model_name_map + ) + if request_body_oai: + operation["requestBody"] = request_body_oai + if route.callbacks: + callbacks = {} + for callback in route.callbacks: + if isinstance(callback, routing.APIRoute): + ( + cb_path, + cb_security_schemes, + cb_definitions, + ) = get_openapi_path( + route=callback, + model_name_map=model_name_map, + operation_ids=operation_ids, + ) + callbacks[callback.name] = {callback.path: cb_path} + operation["callbacks"] = callbacks + if route.status_code is not None: + status_code = str(route.status_code) + else: + # It would probably make more sense for all response classes to have an + # explicit default status_code, and to extract it from them, instead of + # doing this inspection tricks, that would probably be in the future + # TODO: probably make status_code a default class attribute for all + # responses in Starlette + response_signature = inspect.signature(current_response_class.__init__) + status_code_param = response_signature.parameters.get("status_code") + if status_code_param is not None: + if isinstance(status_code_param.default, int): + status_code = str(status_code_param.default) + operation.setdefault("responses", {}).setdefault(status_code, {})[ + "description" + ] = route.response_description + if route_response_media_type and is_body_allowed_for_status_code( + route.status_code + ): + response_schema = {"type": "string"} + if lenient_issubclass(current_response_class, JSONResponse): + if route.response_field: + response_schema, _, _ = field_schema( + route.response_field, + model_name_map=model_name_map, + 
ref_prefix=REF_PREFIX, + ) + else: + response_schema = {} + operation.setdefault("responses", {}).setdefault( + status_code, {} + ).setdefault("content", {}).setdefault(route_response_media_type, {})[ + "schema" + ] = response_schema + if route.responses: + operation_responses = operation.setdefault("responses", {}) + for ( + additional_status_code, + additional_response, + ) in route.responses.items(): + process_response = additional_response.copy() + process_response.pop("model", None) + status_code_key = str(additional_status_code).upper() + if status_code_key == "DEFAULT": + status_code_key = "default" + openapi_response = operation_responses.setdefault( + status_code_key, {} + ) + assert isinstance( + process_response, dict + ), "An additional response must be a dict" + field = route.response_fields.get(additional_status_code) + additional_field_schema: Optional[Dict[str, Any]] = None + if field: + additional_field_schema, _, _ = field_schema( + field, model_name_map=model_name_map, ref_prefix=REF_PREFIX + ) + media_type = route_response_media_type or "application/json" + additional_schema = ( + process_response.setdefault("content", {}) + .setdefault(media_type, {}) + .setdefault("schema", {}) + ) + deep_dict_update(additional_schema, additional_field_schema) + status_text: Optional[str] = status_code_ranges.get( + str(additional_status_code).upper() + ) or http.client.responses.get(int(additional_status_code)) + description = ( + process_response.get("description") + or openapi_response.get("description") + or status_text + or "Additional Response" + ) + deep_dict_update(openapi_response, process_response) + openapi_response["description"] = description + http422 = str(HTTP_422_UNPROCESSABLE_ENTITY) + if (all_route_params or route.body_field) and not any( + [ + status in operation["responses"] + for status in [http422, "4XX", "default"] + ] + ): + operation["responses"][http422] = { + "description": "Validation Error", + "content": { + "application/json": { + "schema": {"$ref": REF_PREFIX + "HTTPValidationError"} + } + }, + } + if "ValidationError" not in definitions: + definitions.update( + { + "ValidationError": validation_error_definition, + "HTTPValidationError": validation_error_response_definition, + } + ) + if route.openapi_extra: + deep_dict_update(operation, route.openapi_extra) + path[method.lower()] = operation + return path, security_schemes, definitions + + +def get_flat_models_from_routes( + routes: Sequence[BaseRoute], +) -> Set[Union[Type[BaseModel], Type[Enum]]]: + body_fields_from_routes: List[ModelField] = [] + responses_from_routes: List[ModelField] = [] + request_fields_from_routes: List[ModelField] = [] + callback_flat_models: Set[Union[Type[BaseModel], Type[Enum]]] = set() + for route in routes: + if getattr(route, "include_in_schema", None) and isinstance( + route, routing.APIRoute + ): + if route.body_field: + assert isinstance( + route.body_field, ModelField + ), "A request body must be a Pydantic Field" + body_fields_from_routes.append(route.body_field) + if route.response_field: + responses_from_routes.append(route.response_field) + if route.response_fields: + responses_from_routes.extend(route.response_fields.values()) + if route.callbacks: + callback_flat_models |= get_flat_models_from_routes(route.callbacks) + params = get_flat_params(route.dependant) + request_fields_from_routes.extend(params) + + flat_models = callback_flat_models | get_flat_models_from_fields( + body_fields_from_routes + responses_from_routes + request_fields_from_routes, + 
known_models=set(), + ) + return flat_models + + +def get_openapi( + *, + title: str, + version: str, + openapi_version: str = "3.0.2", + description: Optional[str] = None, + routes: Sequence[BaseRoute], + tags: Optional[List[Dict[str, Any]]] = None, + servers: Optional[List[Dict[str, Union[str, Any]]]] = None, + terms_of_service: Optional[str] = None, + contact: Optional[Dict[str, Union[str, Any]]] = None, + license_info: Optional[Dict[str, Union[str, Any]]] = None, +) -> Dict[str, Any]: + info: Dict[str, Any] = {"title": title, "version": version} + if description: + info["description"] = description + if terms_of_service: + info["termsOfService"] = terms_of_service + if contact: + info["contact"] = contact + if license_info: + info["license"] = license_info + output: Dict[str, Any] = {"openapi": openapi_version, "info": info} + if servers: + output["servers"] = servers + components: Dict[str, Dict[str, Any]] = {} + paths: Dict[str, Dict[str, Any]] = {} + operation_ids: Set[str] = set() + flat_models = get_flat_models_from_routes(routes) + model_name_map = get_model_name_map(flat_models) + definitions = get_model_definitions( + flat_models=flat_models, model_name_map=model_name_map + ) + for route in routes: + if isinstance(route, routing.APIRoute): + result = get_openapi_path( + route=route, model_name_map=model_name_map, operation_ids=operation_ids + ) + if result: + path, security_schemes, path_definitions = result + if path: + paths.setdefault(route.path_format, {}).update(path) + if security_schemes: + components.setdefault("securitySchemes", {}).update( + security_schemes + ) + if path_definitions: + definitions.update(path_definitions) + if definitions: + components["schemas"] = {k: definitions[k] for k in sorted(definitions)} + if components: + output["components"] = components + output["paths"] = paths + if tags: + output["tags"] = tags + return jsonable_encoder(OpenAPI(**output), by_alias=True, exclude_none=True) # type: ignore diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/param_functions.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/param_functions.py new file mode 100644 index 00000000..1932ef06 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/param_functions.py @@ -0,0 +1,290 @@ +from typing import Any, Callable, Dict, Optional, Sequence + +from fastapi import params +from pydantic.fields import Undefined + + +def Path( # noqa: N802 + default: Any = Undefined, + *, + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, +) -> Any: + return params.Path( + default=default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + **extra, + ) + + +def Query( # noqa: N802 + default: Any = Undefined, + *, + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + 
lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, +) -> Any: + return params.Query( + default=default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + **extra, + ) + + +def Header( # noqa: N802 + default: Any = Undefined, + *, + alias: Optional[str] = None, + convert_underscores: bool = True, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, +) -> Any: + return params.Header( + default=default, + alias=alias, + convert_underscores=convert_underscores, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + **extra, + ) + + +def Cookie( # noqa: N802 + default: Any = Undefined, + *, + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, +) -> Any: + return params.Cookie( + default=default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + deprecated=deprecated, + include_in_schema=include_in_schema, + **extra, + ) + + +def Body( # noqa: N802 + default: Any = Undefined, + *, + embed: bool = False, + media_type: str = "application/json", + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + **extra: Any, +) -> Any: + return params.Body( + default=default, + embed=embed, + media_type=media_type, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + **extra, + ) + + +def Form( # noqa: N802 + default: Any = Undefined, + *, + media_type: str = "application/x-www-form-urlencoded", + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: 
Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + **extra: Any, +) -> Any: + return params.Form( + default=default, + media_type=media_type, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + **extra, + ) + + +def File( # noqa: N802 + default: Any = Undefined, + *, + media_type: str = "multipart/form-data", + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + **extra: Any, +) -> Any: + return params.File( + default=default, + media_type=media_type, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + **extra, + ) + + +def Depends( # noqa: N802 + dependency: Optional[Callable[..., Any]] = None, *, use_cache: bool = True +) -> Any: + return params.Depends(dependency=dependency, use_cache=use_cache) + + +def Security( # noqa: N802 + dependency: Optional[Callable[..., Any]] = None, + *, + scopes: Optional[Sequence[str]] = None, + use_cache: bool = True, +) -> Any: + return params.Security(dependency=dependency, scopes=scopes, use_cache=use_cache) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/params.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/params.py new file mode 100644 index 00000000..5395b98a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/params.py @@ -0,0 +1,380 @@ +from enum import Enum +from typing import Any, Callable, Dict, Optional, Sequence + +from pydantic.fields import FieldInfo, Undefined + + +class ParamTypes(Enum): + query = "query" + header = "header" + path = "path" + cookie = "cookie" + + +class Param(FieldInfo): + in_: ParamTypes + + def __init__( + self, + default: Any = Undefined, + *, + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, + ): + self.deprecated = deprecated + self.example = example + self.examples = examples + self.include_in_schema = include_in_schema + super().__init__( + default=default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + **extra, + ) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.default})" + + +class Path(Param): + in_ = ParamTypes.path + + def __init__( + self, + default: Any = Undefined, + *, + alias: Optional[str] = 
None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, + ): + self.in_ = self.in_ + super().__init__( + default=..., + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + deprecated=deprecated, + example=example, + examples=examples, + include_in_schema=include_in_schema, + **extra, + ) + + +class Query(Param): + in_ = ParamTypes.query + + def __init__( + self, + default: Any = Undefined, + *, + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, + ): + super().__init__( + default=default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + deprecated=deprecated, + example=example, + examples=examples, + include_in_schema=include_in_schema, + **extra, + ) + + +class Header(Param): + in_ = ParamTypes.header + + def __init__( + self, + default: Any = Undefined, + *, + alias: Optional[str] = None, + convert_underscores: bool = True, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, + ): + self.convert_underscores = convert_underscores + super().__init__( + default=default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + deprecated=deprecated, + example=example, + examples=examples, + include_in_schema=include_in_schema, + **extra, + ) + + +class Cookie(Param): + in_ = ParamTypes.cookie + + def __init__( + self, + default: Any = Undefined, + *, + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + **extra: Any, + ): + super().__init__( + default=default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + deprecated=deprecated, + example=example, + 
examples=examples, + include_in_schema=include_in_schema, + **extra, + ) + + +class Body(FieldInfo): + def __init__( + self, + default: Any = Undefined, + *, + embed: bool = False, + media_type: str = "application/json", + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + **extra: Any, + ): + self.embed = embed + self.media_type = media_type + self.example = example + self.examples = examples + super().__init__( + default=default, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + **extra, + ) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.default})" + + +class Form(Body): + def __init__( + self, + default: Any, + *, + media_type: str = "application/x-www-form-urlencoded", + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + **extra: Any, + ): + super().__init__( + default=default, + embed=True, + media_type=media_type, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + **extra, + ) + + +class File(Form): + def __init__( + self, + default: Any, + *, + media_type: str = "multipart/form-data", + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + regex: Optional[str] = None, + example: Any = Undefined, + examples: Optional[Dict[str, Any]] = None, + **extra: Any, + ): + super().__init__( + default=default, + media_type=media_type, + alias=alias, + title=title, + description=description, + gt=gt, + ge=ge, + lt=lt, + le=le, + min_length=min_length, + max_length=max_length, + regex=regex, + example=example, + examples=examples, + **extra, + ) + + +class Depends: + def __init__( + self, dependency: Optional[Callable[..., Any]] = None, *, use_cache: bool = True + ): + self.dependency = dependency + self.use_cache = use_cache + + def __repr__(self) -> str: + attr = getattr(self.dependency, "__name__", type(self.dependency).__name__) + cache = "" if self.use_cache else ", use_cache=False" + return f"{self.__class__.__name__}({attr}{cache})" + + +class Security(Depends): + def __init__( + self, + dependency: Optional[Callable[..., Any]] = None, + *, + scopes: Optional[Sequence[str]] = None, + use_cache: bool = True, + ): + super().__init__(dependency=dependency, use_cache=use_cache) + self.scopes = scopes or [] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/py.typed b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/py.typed new file mode 100644 index 
00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/requests.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/requests.py new file mode 100644 index 00000000..d16552c0 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/requests.py @@ -0,0 +1,2 @@ +from starlette.requests import HTTPConnection as HTTPConnection # noqa: F401 +from starlette.requests import Request as Request # noqa: F401 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/responses.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/responses.py new file mode 100644 index 00000000..6cd79315 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/responses.py @@ -0,0 +1,34 @@ +from typing import Any + +from starlette.responses import FileResponse as FileResponse # noqa +from starlette.responses import HTMLResponse as HTMLResponse # noqa +from starlette.responses import JSONResponse as JSONResponse # noqa +from starlette.responses import PlainTextResponse as PlainTextResponse # noqa +from starlette.responses import RedirectResponse as RedirectResponse # noqa +from starlette.responses import Response as Response # noqa +from starlette.responses import StreamingResponse as StreamingResponse # noqa + +try: + import ujson +except ImportError: # pragma: nocover + ujson = None # type: ignore + + +try: + import orjson +except ImportError: # pragma: nocover + orjson = None # type: ignore + + +class UJSONResponse(JSONResponse): + def render(self, content: Any) -> bytes: + assert ujson is not None, "ujson must be installed to use UJSONResponse" + return ujson.dumps(content, ensure_ascii=False).encode("utf-8") + + +class ORJSONResponse(JSONResponse): + media_type = "application/json" + + def render(self, content: Any) -> bytes: + assert orjson is not None, "orjson must be installed to use ORJSONResponse" + return orjson.dumps(content) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/routing.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/routing.py new file mode 100644 index 00000000..1ac4b388 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/routing.py @@ -0,0 +1,1237 @@ +import asyncio +import dataclasses +import email.message +import inspect +import json +from enum import Enum, IntEnum +from typing import ( + Any, + Callable, + Coroutine, + Dict, + List, + Optional, + Sequence, + Set, + Tuple, + Type, + Union, +) + +from fastapi import params +from fastapi.datastructures import Default, DefaultPlaceholder +from fastapi.dependencies.models import Dependant +from fastapi.dependencies.utils import ( + get_body_field, + get_dependant, + get_parameterless_sub_dependant, + solve_dependencies, +) +from fastapi.encoders import DictIntStrAny, SetIntStr, jsonable_encoder +from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError +from fastapi.types import DecoratedCallable +from fastapi.utils import ( + create_cloned_field, + create_response_field, + generate_unique_id, + get_value_or_default, + is_body_allowed_for_status_code, +) +from pydantic import BaseModel +from pydantic.error_wrappers import ErrorWrapper, ValidationError +from pydantic.fields import ModelField, Undefined +from starlette import routing +from starlette.concurrency import run_in_threadpool +from starlette.exceptions import HTTPException +from starlette.requests import Request +from 
starlette.responses import JSONResponse, Response +from starlette.routing import BaseRoute, Match +from starlette.routing import Mount as Mount # noqa +from starlette.routing import ( + compile_path, + get_name, + request_response, + websocket_session, +) +from starlette.status import WS_1008_POLICY_VIOLATION +from starlette.types import ASGIApp, Scope +from starlette.websockets import WebSocket + + +def _prepare_response_content( + res: Any, + *, + exclude_unset: bool, + exclude_defaults: bool = False, + exclude_none: bool = False, +) -> Any: + if isinstance(res, BaseModel): + read_with_orm_mode = getattr(res.__config__, "read_with_orm_mode", None) + if read_with_orm_mode: + # Let from_orm extract the data from this model instead of converting + # it now to a dict. + # Otherwise there's no way to extract lazy data that requires attribute + # access instead of dict iteration, e.g. lazy relationships. + return res + return res.dict( + by_alias=True, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + elif isinstance(res, list): + return [ + _prepare_response_content( + item, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + for item in res + ] + elif isinstance(res, dict): + return { + k: _prepare_response_content( + v, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + for k, v in res.items() + } + elif dataclasses.is_dataclass(res): + return dataclasses.asdict(res) + return res + + +async def serialize_response( + *, + field: Optional[ModelField] = None, + response_content: Any, + include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + by_alias: bool = True, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + is_coroutine: bool = True, +) -> Any: + if field: + errors = [] + response_content = _prepare_response_content( + response_content, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + if is_coroutine: + value, errors_ = field.validate(response_content, {}, loc=("response",)) + else: + value, errors_ = await run_in_threadpool( # type: ignore[misc] + field.validate, response_content, {}, loc=("response",) + ) + if isinstance(errors_, ErrorWrapper): + errors.append(errors_) + elif isinstance(errors_, list): + errors.extend(errors_) + if errors: + raise ValidationError(errors, field.type_) + return jsonable_encoder( + value, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + else: + return jsonable_encoder(response_content) + + +async def run_endpoint_function( + *, dependant: Dependant, values: Dict[str, Any], is_coroutine: bool +) -> Any: + # Only called by get_request_handler. Has been split into its own function to + # facilitate profiling endpoints, since inner functions are harder to profile. 
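+ # Editor's note (added comment): in the dispatch below, coroutine endpoints
+ # (async def) are awaited directly on the event loop, while plain def
+ # endpoints are delegated to a worker thread via Starlette's
+ # run_in_threadpool, so a blocking endpoint cannot stall the loop.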
+ assert dependant.call is not None, "dependant.call must be a function" + + if is_coroutine: + return await dependant.call(**values) + else: + return await run_in_threadpool(dependant.call, **values) + + +def get_request_handler( + dependant: Dependant, + body_field: Optional[ModelField] = None, + status_code: Optional[int] = None, + response_class: Union[Type[Response], DefaultPlaceholder] = Default(JSONResponse), + response_field: Optional[ModelField] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + dependency_overrides_provider: Optional[Any] = None, +) -> Callable[[Request], Coroutine[Any, Any, Response]]: + assert dependant.call is not None, "dependant.call must be a function" + is_coroutine = asyncio.iscoroutinefunction(dependant.call) + is_body_form = body_field and isinstance(body_field.field_info, params.Form) + if isinstance(response_class, DefaultPlaceholder): + actual_response_class: Type[Response] = response_class.value + else: + actual_response_class = response_class + + async def app(request: Request) -> Response: + try: + body: Any = None + if body_field: + if is_body_form: + body = await request.form() + else: + body_bytes = await request.body() + if body_bytes: + json_body: Any = Undefined + content_type_value = request.headers.get("content-type") + if not content_type_value: + json_body = await request.json() + else: + message = email.message.Message() + message["content-type"] = content_type_value + if message.get_content_maintype() == "application": + subtype = message.get_content_subtype() + if subtype == "json" or subtype.endswith("+json"): + json_body = await request.json() + if json_body != Undefined: + body = json_body + else: + body = body_bytes + except json.JSONDecodeError as e: + raise RequestValidationError( + [ErrorWrapper(e, ("body", e.pos))], body=e.doc + ) from e + except HTTPException: + raise + except Exception as e: + raise HTTPException( + status_code=400, detail="There was an error parsing the body" + ) from e + solved_result = await solve_dependencies( + request=request, + dependant=dependant, + body=body, + dependency_overrides_provider=dependency_overrides_provider, + ) + values, errors, background_tasks, sub_response, _ = solved_result + if errors: + raise RequestValidationError(errors, body=body) + else: + raw_response = await run_endpoint_function( + dependant=dependant, values=values, is_coroutine=is_coroutine + ) + + if isinstance(raw_response, Response): + if raw_response.background is None: + raw_response.background = background_tasks + return raw_response + response_args: Dict[str, Any] = {"background": background_tasks} + # If status_code was set, use it, otherwise use the default from the + # response class, in the case of redirect it's 307 + current_status_code = ( + status_code if status_code else sub_response.status_code + ) + if current_status_code is not None: + response_args["status_code"] = current_status_code + if sub_response.status_code: + response_args["status_code"] = sub_response.status_code + content = await serialize_response( + field=response_field, + response_content=raw_response, + include=response_model_include, + exclude=response_model_exclude, + by_alias=response_model_by_alias, + exclude_unset=response_model_exclude_unset, + 
exclude_defaults=response_model_exclude_defaults, + exclude_none=response_model_exclude_none, + is_coroutine=is_coroutine, + ) + response = actual_response_class(content, **response_args) + if not is_body_allowed_for_status_code(status_code): + response.body = b"" + response.headers.raw.extend(sub_response.headers.raw) + return response + + return app + + +def get_websocket_app( + dependant: Dependant, dependency_overrides_provider: Optional[Any] = None +) -> Callable[[WebSocket], Coroutine[Any, Any, Any]]: + async def app(websocket: WebSocket) -> None: + solved_result = await solve_dependencies( + request=websocket, + dependant=dependant, + dependency_overrides_provider=dependency_overrides_provider, + ) + values, errors, _, _2, _3 = solved_result + if errors: + await websocket.close(code=WS_1008_POLICY_VIOLATION) + raise WebSocketRequestValidationError(errors) + assert dependant.call is not None, "dependant.call must be a function" + await dependant.call(**values) + + return app + + +class APIWebSocketRoute(routing.WebSocketRoute): + def __init__( + self, + path: str, + endpoint: Callable[..., Any], + *, + name: Optional[str] = None, + dependency_overrides_provider: Optional[Any] = None, + ) -> None: + self.path = path + self.endpoint = endpoint + self.name = get_name(endpoint) if name is None else name + self.dependant = get_dependant(path=path, call=self.endpoint) + self.app = websocket_session( + get_websocket_app( + dependant=self.dependant, + dependency_overrides_provider=dependency_overrides_provider, + ) + ) + self.path_regex, self.path_format, self.param_convertors = compile_path(path) + + def matches(self, scope: Scope) -> Tuple[Match, Scope]: + match, child_scope = super().matches(scope) + if match != Match.NONE: + child_scope["route"] = self + return match, child_scope + + +class APIRoute(routing.Route): + def __init__( + self, + path: str, + endpoint: Callable[..., Any], + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + name: Optional[str] = None, + methods: Optional[Union[Set[str], List[str]]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Union[Type[Response], DefaultPlaceholder] = Default( + JSONResponse + ), + dependency_overrides_provider: Optional[Any] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Union[ + Callable[["APIRoute"], str], DefaultPlaceholder + ] = Default(generate_unique_id), + ) -> None: + self.path = path + self.endpoint = endpoint + self.response_model = response_model + self.summary = summary + self.response_description = response_description + self.deprecated = deprecated + self.operation_id = operation_id + self.response_model_include = response_model_include + self.response_model_exclude = response_model_exclude + 
self.response_model_by_alias = response_model_by_alias + self.response_model_exclude_unset = response_model_exclude_unset + self.response_model_exclude_defaults = response_model_exclude_defaults + self.response_model_exclude_none = response_model_exclude_none + self.include_in_schema = include_in_schema + self.response_class = response_class + self.dependency_overrides_provider = dependency_overrides_provider + self.callbacks = callbacks + self.openapi_extra = openapi_extra + self.generate_unique_id_function = generate_unique_id_function + self.tags = tags or [] + self.responses = responses or {} + self.name = get_name(endpoint) if name is None else name + self.path_regex, self.path_format, self.param_convertors = compile_path(path) + if methods is None: + methods = ["GET"] + self.methods: Set[str] = {method.upper() for method in methods} + if isinstance(generate_unique_id_function, DefaultPlaceholder): + current_generate_unique_id: Callable[ + ["APIRoute"], str + ] = generate_unique_id_function.value + else: + current_generate_unique_id = generate_unique_id_function + self.unique_id = self.operation_id or current_generate_unique_id(self) + # normalize enums e.g. http.HTTPStatus + if isinstance(status_code, IntEnum): + status_code = int(status_code) + self.status_code = status_code + if self.response_model: + assert is_body_allowed_for_status_code( + status_code + ), f"Status code {status_code} must not have a response body" + response_name = "Response_" + self.unique_id + self.response_field = create_response_field( + name=response_name, type_=self.response_model + ) + # Create a clone of the field, so that a Pydantic submodel is not returned + # as is just because it's an instance of a subclass of a more limited class + # e.g. UserInDB (containing hashed_password) could be a subclass of User + # that doesn't have the hashed_password. But because it's a subclass, it + # would pass the validation and be returned as is. + # By being a new field, no inheritance will be passed as is. A new model + # will be always created. 
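+ # Editor's note (added comment): e.g. with response_model=User, an endpoint
+ # that returns a UserInDB instance is re-validated against a freshly
+ # created User field, so fields like hashed_password are filtered out of
+ # the response rather than slipping through subclass validation.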
+ self.secure_cloned_response_field: Optional[ + ModelField + ] = create_cloned_field(self.response_field) + else: + self.response_field = None # type: ignore + self.secure_cloned_response_field = None + if dependencies: + self.dependencies = list(dependencies) + else: + self.dependencies = [] + self.description = description or inspect.cleandoc(self.endpoint.__doc__ or "") + # if a "form feed" character (page break) is found in the description text, + # truncate description text to the content preceding the first "form feed" + self.description = self.description.split("\f")[0].strip() + response_fields = {} + for additional_status_code, response in self.responses.items(): + assert isinstance(response, dict), "An additional response must be a dict" + model = response.get("model") + if model: + assert is_body_allowed_for_status_code( + additional_status_code + ), f"Status code {additional_status_code} must not have a response body" + response_name = f"Response_{additional_status_code}_{self.unique_id}" + response_field = create_response_field(name=response_name, type_=model) + response_fields[additional_status_code] = response_field + if response_fields: + self.response_fields: Dict[Union[int, str], ModelField] = response_fields + else: + self.response_fields = {} + + assert callable(endpoint), "An endpoint must be a callable" + self.dependant = get_dependant(path=self.path_format, call=self.endpoint) + for depends in self.dependencies[::-1]: + self.dependant.dependencies.insert( + 0, + get_parameterless_sub_dependant(depends=depends, path=self.path_format), + ) + self.body_field = get_body_field(dependant=self.dependant, name=self.unique_id) + self.app = request_response(self.get_route_handler()) + + def get_route_handler(self) -> Callable[[Request], Coroutine[Any, Any, Response]]: + return get_request_handler( + dependant=self.dependant, + body_field=self.body_field, + status_code=self.status_code, + response_class=self.response_class, + response_field=self.secure_cloned_response_field, + response_model_include=self.response_model_include, + response_model_exclude=self.response_model_exclude, + response_model_by_alias=self.response_model_by_alias, + response_model_exclude_unset=self.response_model_exclude_unset, + response_model_exclude_defaults=self.response_model_exclude_defaults, + response_model_exclude_none=self.response_model_exclude_none, + dependency_overrides_provider=self.dependency_overrides_provider, + ) + + def matches(self, scope: Scope) -> Tuple[Match, Scope]: + match, child_scope = super().matches(scope) + if match != Match.NONE: + child_scope["route"] = self + return match, child_scope + + +class APIRouter(routing.Router): + def __init__( + self, + *, + prefix: str = "", + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + default_response_class: Type[Response] = Default(JSONResponse), + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + callbacks: Optional[List[BaseRoute]] = None, + routes: Optional[List[routing.BaseRoute]] = None, + redirect_slashes: bool = True, + default: Optional[ASGIApp] = None, + dependency_overrides_provider: Optional[Any] = None, + route_class: Type[APIRoute] = APIRoute, + on_startup: Optional[Sequence[Callable[[], Any]]] = None, + on_shutdown: Optional[Sequence[Callable[[], Any]]] = None, + deprecated: Optional[bool] = None, + include_in_schema: bool = True, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> None: + 
super().__init__( + routes=routes, + redirect_slashes=redirect_slashes, + default=default, + on_startup=on_startup, + on_shutdown=on_shutdown, + ) + if prefix: + assert prefix.startswith("/"), "A path prefix must start with '/'" + assert not prefix.endswith( + "/" + ), "A path prefix must not end with '/', as the routes will start with '/'" + self.prefix = prefix + self.tags: List[Union[str, Enum]] = tags or [] + self.dependencies = list(dependencies or []) or [] + self.deprecated = deprecated + self.include_in_schema = include_in_schema + self.responses = responses or {} + self.callbacks = callbacks or [] + self.dependency_overrides_provider = dependency_overrides_provider + self.route_class = route_class + self.default_response_class = default_response_class + self.generate_unique_id_function = generate_unique_id_function + + def add_api_route( + self, + path: str, + endpoint: Callable[..., Any], + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[Union[Set[str], List[str]]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Union[Type[Response], DefaultPlaceholder] = Default( + JSONResponse + ), + name: Optional[str] = None, + route_class_override: Optional[Type[APIRoute]] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Union[ + Callable[[APIRoute], str], DefaultPlaceholder + ] = Default(generate_unique_id), + ) -> None: + route_class = route_class_override or self.route_class + responses = responses or {} + combined_responses = {**self.responses, **responses} + current_response_class = get_value_or_default( + response_class, self.default_response_class + ) + current_tags = self.tags.copy() + if tags: + current_tags.extend(tags) + current_dependencies = self.dependencies.copy() + if dependencies: + current_dependencies.extend(dependencies) + current_callbacks = self.callbacks.copy() + if callbacks: + current_callbacks.extend(callbacks) + current_generate_unique_id = get_value_or_default( + generate_unique_id_function, self.generate_unique_id_function + ) + route = route_class( + self.prefix + path, + endpoint=endpoint, + response_model=response_model, + status_code=status_code, + tags=current_tags, + dependencies=current_dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=combined_responses, + deprecated=deprecated or self.deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + 
response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema and self.include_in_schema, + response_class=current_response_class, + name=name, + dependency_overrides_provider=self.dependency_overrides_provider, + callbacks=current_callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=current_generate_unique_id, + ) + self.routes.append(route) + + def api_route( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + methods: Optional[List[str]] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_api_route( + path, + func, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=methods, + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + return func + + return decorator + + def add_api_websocket_route( + self, path: str, endpoint: Callable[..., Any], name: Optional[str] = None + ) -> None: + route = APIWebSocketRoute( + self.prefix + path, + endpoint=endpoint, + name=name, + dependency_overrides_provider=self.dependency_overrides_provider, + ) + self.routes.append(route) + + def websocket( + self, path: str, name: Optional[str] = None + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + def decorator(func: DecoratedCallable) -> DecoratedCallable: + self.add_api_websocket_route(path, func, name=name) + return func + + return decorator + + def include_router( + self, + router: "APIRouter", + *, + prefix: str = "", + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + default_response_class: Type[Response] = Default(JSONResponse), + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + callbacks: Optional[List[BaseRoute]] = None, + deprecated: 
Optional[bool] = None, + include_in_schema: bool = True, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> None: + if prefix: + assert prefix.startswith("/"), "A path prefix must start with '/'" + assert not prefix.endswith( + "/" + ), "A path prefix must not end with '/', as the routes will start with '/'" + else: + for r in router.routes: + path = getattr(r, "path") + name = getattr(r, "name", "unknown") + if path is not None and not path: + raise Exception( + f"Prefix and path cannot be both empty (path operation: {name})" + ) + if responses is None: + responses = {} + for route in router.routes: + if isinstance(route, APIRoute): + combined_responses = {**responses, **route.responses} + use_response_class = get_value_or_default( + route.response_class, + router.default_response_class, + default_response_class, + self.default_response_class, + ) + current_tags = [] + if tags: + current_tags.extend(tags) + if route.tags: + current_tags.extend(route.tags) + current_dependencies: List[params.Depends] = [] + if dependencies: + current_dependencies.extend(dependencies) + if route.dependencies: + current_dependencies.extend(route.dependencies) + current_callbacks = [] + if callbacks: + current_callbacks.extend(callbacks) + if route.callbacks: + current_callbacks.extend(route.callbacks) + current_generate_unique_id = get_value_or_default( + route.generate_unique_id_function, + router.generate_unique_id_function, + generate_unique_id_function, + self.generate_unique_id_function, + ) + self.add_api_route( + prefix + route.path, + route.endpoint, + response_model=route.response_model, + status_code=route.status_code, + tags=current_tags, + dependencies=current_dependencies, + summary=route.summary, + description=route.description, + response_description=route.response_description, + responses=combined_responses, + deprecated=route.deprecated or deprecated or self.deprecated, + methods=route.methods, + operation_id=route.operation_id, + response_model_include=route.response_model_include, + response_model_exclude=route.response_model_exclude, + response_model_by_alias=route.response_model_by_alias, + response_model_exclude_unset=route.response_model_exclude_unset, + response_model_exclude_defaults=route.response_model_exclude_defaults, + response_model_exclude_none=route.response_model_exclude_none, + include_in_schema=route.include_in_schema + and self.include_in_schema + and include_in_schema, + response_class=use_response_class, + name=route.name, + route_class_override=type(route), + callbacks=current_callbacks, + openapi_extra=route.openapi_extra, + generate_unique_id_function=current_generate_unique_id, + ) + elif isinstance(route, routing.Route): + methods = list(route.methods or []) + self.add_route( + prefix + route.path, + route.endpoint, + methods=methods, + include_in_schema=route.include_in_schema, + name=route.name, + ) + elif isinstance(route, APIWebSocketRoute): + self.add_api_websocket_route( + prefix + route.path, route.endpoint, name=route.name + ) + elif isinstance(route, routing.WebSocketRoute): + self.add_websocket_route( + prefix + route.path, route.endpoint, name=route.name + ) + for handler in router.on_startup: + self.add_event_handler("startup", handler) + for handler in router.on_shutdown: + self.add_event_handler("shutdown", handler) + + def get( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: 
Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["GET"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def put( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["PUT"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + 
response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def post( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["POST"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def delete( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + 
generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["DELETE"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def options( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["OPTIONS"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def head( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] 
= None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["HEAD"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + + def patch( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["PATCH"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) + 
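+ # Usage sketch (editor's addition; "router", "app" and the endpoint are
+ # illustrative names, not part of this module):
+ #
+ #     router = APIRouter(prefix="/items")
+ #
+ #     @router.get("/{item_id}")
+ #     def read_item(item_id: int) -> dict:
+ #         return {"item_id": item_id}
+ #
+ #     app.include_router(router)
+ #
+ # Each verb helper (get, put, post, delete, options, head, patch, and the
+ # trace below) just calls api_route() with the matching single-element
+ # methods list.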
+ def trace( + self, + path: str, + *, + response_model: Any = None, + status_code: Optional[int] = None, + tags: Optional[List[Union[str, Enum]]] = None, + dependencies: Optional[Sequence[params.Depends]] = None, + summary: Optional[str] = None, + description: Optional[str] = None, + response_description: str = "Successful Response", + responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, + deprecated: Optional[bool] = None, + operation_id: Optional[str] = None, + response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None, + response_model_by_alias: bool = True, + response_model_exclude_unset: bool = False, + response_model_exclude_defaults: bool = False, + response_model_exclude_none: bool = False, + include_in_schema: bool = True, + response_class: Type[Response] = Default(JSONResponse), + name: Optional[str] = None, + callbacks: Optional[List[BaseRoute]] = None, + openapi_extra: Optional[Dict[str, Any]] = None, + generate_unique_id_function: Callable[[APIRoute], str] = Default( + generate_unique_id + ), + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + + return self.api_route( + path=path, + response_model=response_model, + status_code=status_code, + tags=tags, + dependencies=dependencies, + summary=summary, + description=description, + response_description=response_description, + responses=responses, + deprecated=deprecated, + methods=["TRACE"], + operation_id=operation_id, + response_model_include=response_model_include, + response_model_exclude=response_model_exclude, + response_model_by_alias=response_model_by_alias, + response_model_exclude_unset=response_model_exclude_unset, + response_model_exclude_defaults=response_model_exclude_defaults, + response_model_exclude_none=response_model_exclude_none, + include_in_schema=include_in_schema, + response_class=response_class, + name=name, + callbacks=callbacks, + openapi_extra=openapi_extra, + generate_unique_id_function=generate_unique_id_function, + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/__init__.py new file mode 100644 index 00000000..3aa6bf21 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/__init__.py @@ -0,0 +1,15 @@ +from .api_key import APIKeyCookie as APIKeyCookie +from .api_key import APIKeyHeader as APIKeyHeader +from .api_key import APIKeyQuery as APIKeyQuery +from .http import HTTPAuthorizationCredentials as HTTPAuthorizationCredentials +from .http import HTTPBasic as HTTPBasic +from .http import HTTPBasicCredentials as HTTPBasicCredentials +from .http import HTTPBearer as HTTPBearer +from .http import HTTPDigest as HTTPDigest +from .oauth2 import OAuth2 as OAuth2 +from .oauth2 import OAuth2AuthorizationCodeBearer as OAuth2AuthorizationCodeBearer +from .oauth2 import OAuth2PasswordBearer as OAuth2PasswordBearer +from .oauth2 import OAuth2PasswordRequestForm as OAuth2PasswordRequestForm +from .oauth2 import OAuth2PasswordRequestFormStrict as OAuth2PasswordRequestFormStrict +from .oauth2 import SecurityScopes as SecurityScopes +from .open_id_connect_url import OpenIdConnect as OpenIdConnect diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/api_key.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/api_key.py new file mode 100644 index 00000000..36ab60e3 --- /dev/null 
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/api_key.py @@ -0,0 +1,92 @@ +from typing import Optional + +from fastapi.openapi.models import APIKey, APIKeyIn +from fastapi.security.base import SecurityBase +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.status import HTTP_403_FORBIDDEN + + +class APIKeyBase(SecurityBase): + pass + + +class APIKeyQuery(APIKeyBase): + def __init__( + self, + *, + name: str, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True + ): + self.model: APIKey = APIKey( + **{"in": APIKeyIn.query}, name=name, description=description + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__(self, request: Request) -> Optional[str]: + api_key: str = request.query_params.get(self.model.name) + if not api_key: + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return api_key + + +class APIKeyHeader(APIKeyBase): + def __init__( + self, + *, + name: str, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True + ): + self.model: APIKey = APIKey( + **{"in": APIKeyIn.header}, name=name, description=description + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__(self, request: Request) -> Optional[str]: + api_key: str = request.headers.get(self.model.name) + if not api_key: + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return api_key + + +class APIKeyCookie(APIKeyBase): + def __init__( + self, + *, + name: str, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True + ): + self.model: APIKey = APIKey( + **{"in": APIKeyIn.cookie}, name=name, description=description + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__(self, request: Request) -> Optional[str]: + api_key = request.cookies.get(self.model.name) + if not api_key: + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return api_key diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/base.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/base.py new file mode 100644 index 00000000..c43555de --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/base.py @@ -0,0 +1,6 @@ +from fastapi.openapi.models import SecurityBase as SecurityBaseModel + + +class SecurityBase: + model: SecurityBaseModel + scheme_name: str diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/http.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/http.py new file mode 100644 index 00000000..1b473c69 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/http.py @@ -0,0 +1,165 @@ +import binascii +from base64 import b64decode +from typing import Optional + +from fastapi.exceptions import HTTPException +from fastapi.openapi.models import HTTPBase as HTTPBaseModel +from fastapi.openapi.models import HTTPBearer as HTTPBearerModel +from fastapi.security.base import SecurityBase +from 
fastapi.security.utils import get_authorization_scheme_param +from pydantic import BaseModel +from starlette.requests import Request +from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN + + +class HTTPBasicCredentials(BaseModel): + username: str + password: str + + +class HTTPAuthorizationCredentials(BaseModel): + scheme: str + credentials: str + + +class HTTPBase(SecurityBase): + def __init__( + self, + *, + scheme: str, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True, + ): + self.model = HTTPBaseModel(scheme=scheme, description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__( + self, request: Request + ) -> Optional[HTTPAuthorizationCredentials]: + authorization: str = request.headers.get("Authorization") + scheme, credentials = get_authorization_scheme_param(authorization) + if not (authorization and scheme and credentials): + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) + + +class HTTPBasic(HTTPBase): + def __init__( + self, + *, + scheme_name: Optional[str] = None, + realm: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True, + ): + self.model = HTTPBaseModel(scheme="basic", description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.realm = realm + self.auto_error = auto_error + + async def __call__( # type: ignore + self, request: Request + ) -> Optional[HTTPBasicCredentials]: + authorization: str = request.headers.get("Authorization") + scheme, param = get_authorization_scheme_param(authorization) + if self.realm: + unauthorized_headers = {"WWW-Authenticate": f'Basic realm="{self.realm}"'} + else: + unauthorized_headers = {"WWW-Authenticate": "Basic"} + invalid_user_credentials_exc = HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Invalid authentication credentials", + headers=unauthorized_headers, + ) + if not authorization or scheme.lower() != "basic": + if self.auto_error: + raise HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + headers=unauthorized_headers, + ) + else: + return None + try: + data = b64decode(param).decode("ascii") + except (ValueError, UnicodeDecodeError, binascii.Error): + raise invalid_user_credentials_exc + username, separator, password = data.partition(":") + if not separator: + raise invalid_user_credentials_exc + return HTTPBasicCredentials(username=username, password=password) + + +class HTTPBearer(HTTPBase): + def __init__( + self, + *, + bearerFormat: Optional[str] = None, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True, + ): + self.model = HTTPBearerModel(bearerFormat=bearerFormat, description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__( + self, request: Request + ) -> Optional[HTTPAuthorizationCredentials]: + authorization: str = request.headers.get("Authorization") + scheme, credentials = get_authorization_scheme_param(authorization) + if not (authorization and scheme and credentials): + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + if scheme.lower() != "bearer": + if self.auto_error: + raise HTTPException( + 
status_code=HTTP_403_FORBIDDEN,
+                    detail="Invalid authentication credentials",
+                )
+            else:
+                return None
+        return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials)
+
+
+class HTTPDigest(HTTPBase):
+    def __init__(
+        self,
+        *,
+        scheme_name: Optional[str] = None,
+        description: Optional[str] = None,
+        auto_error: bool = True,
+    ):
+        self.model = HTTPBaseModel(scheme="digest", description=description)
+        self.scheme_name = scheme_name or self.__class__.__name__
+        self.auto_error = auto_error
+
+    async def __call__(
+        self, request: Request
+    ) -> Optional[HTTPAuthorizationCredentials]:
+        authorization: str = request.headers.get("Authorization")
+        scheme, credentials = get_authorization_scheme_param(authorization)
+        if not (authorization and scheme and credentials):
+            if self.auto_error:
+                raise HTTPException(
+                    status_code=HTTP_403_FORBIDDEN, detail="Not authenticated"
+                )
+            else:
+                return None
+        if scheme.lower() != "digest":
+            raise HTTPException(
+                status_code=HTTP_403_FORBIDDEN,
+                detail="Invalid authentication credentials",
+            )
+        return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials)
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/oauth2.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/oauth2.py
new file mode 100644
index 00000000..653c3010
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/oauth2.py
@@ -0,0 +1,220 @@
+from typing import Any, Dict, List, Optional, Union
+
+from fastapi.exceptions import HTTPException
+from fastapi.openapi.models import OAuth2 as OAuth2Model
+from fastapi.openapi.models import OAuthFlows as OAuthFlowsModel
+from fastapi.param_functions import Form
+from fastapi.security.base import SecurityBase
+from fastapi.security.utils import get_authorization_scheme_param
+from starlette.requests import Request
+from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN
+
+
+class OAuth2PasswordRequestForm:
+    """
+    This is a dependency class, use it like:
+
+        @app.post("/login")
+        def login(form_data: OAuth2PasswordRequestForm = Depends()):
+            print(form_data.username)
+            print(form_data.password)
+            for scope in form_data.scopes:
+                print(scope)
+            if form_data.client_id:
+                print(form_data.client_id)
+            if form_data.client_secret:
+                print(form_data.client_secret)
+            return form_data
+
+
+    It creates the following Form request parameters in your endpoint:
+
+    grant_type: the OAuth2 spec says it is required and MUST be the fixed string "password".
+        Nevertheless, this dependency class is permissive and allows not passing it. If you want to enforce it,
+        use instead the OAuth2PasswordRequestFormStrict dependency.
+    username: username string. The OAuth2 spec requires the exact field name "username".
+    password: password string. The OAuth2 spec requires the exact field name "password".
+    scope: Optional string. Several scopes (each one a string) separated by spaces. E.g.
+        "items:read items:write users:read profile openid"
+    client_id: optional string. OAuth2 recommends sending the client_id and client_secret (if any)
+        using HTTP Basic auth, as: client_id:client_secret
+    client_secret: optional string.
OAuth2 recommends sending the client_id and client_secret (if any)
+        using HTTP Basic auth, as: client_id:client_secret
+    """
+
+    def __init__(
+        self,
+        grant_type: str = Form(default=None, regex="password"),
+        username: str = Form(),
+        password: str = Form(),
+        scope: str = Form(default=""),
+        client_id: Optional[str] = Form(default=None),
+        client_secret: Optional[str] = Form(default=None),
+    ):
+        self.grant_type = grant_type
+        self.username = username
+        self.password = password
+        self.scopes = scope.split()
+        self.client_id = client_id
+        self.client_secret = client_secret
+
+
+class OAuth2PasswordRequestFormStrict(OAuth2PasswordRequestForm):
+    """
+    This is a dependency class, use it like:
+
+        @app.post("/login")
+        def login(form_data: OAuth2PasswordRequestFormStrict = Depends()):
+            print(form_data.username)
+            print(form_data.password)
+            for scope in form_data.scopes:
+                print(scope)
+            if form_data.client_id:
+                print(form_data.client_id)
+            if form_data.client_secret:
+                print(form_data.client_secret)
+            return form_data
+
+
+    It creates the following Form request parameters in your endpoint:
+
+    grant_type: the OAuth2 spec says it is required and MUST be the fixed string "password".
+        This dependency is strict about it. If you want to be permissive, use instead the
+        OAuth2PasswordRequestForm dependency class.
+    username: username string. The OAuth2 spec requires the exact field name "username".
+    password: password string. The OAuth2 spec requires the exact field name "password".
+    scope: Optional string. Several scopes (each one a string) separated by spaces. E.g.
+        "items:read items:write users:read profile openid"
+    client_id: optional string. OAuth2 recommends sending the client_id and client_secret (if any)
+        using HTTP Basic auth, as: client_id:client_secret
+    client_secret: optional string.
OAuth2 recommends sending the client_id and client_secret (if any) + using HTTP Basic auth, as: client_id:client_secret + """ + + def __init__( + self, + grant_type: str = Form(regex="password"), + username: str = Form(), + password: str = Form(), + scope: str = Form(default=""), + client_id: Optional[str] = Form(default=None), + client_secret: Optional[str] = Form(default=None), + ): + super().__init__( + grant_type=grant_type, + username=username, + password=password, + scope=scope, + client_id=client_id, + client_secret=client_secret, + ) + + +class OAuth2(SecurityBase): + def __init__( + self, + *, + flows: Union[OAuthFlowsModel, Dict[str, Dict[str, Any]]] = OAuthFlowsModel(), + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True + ): + self.model = OAuth2Model(flows=flows, description=description) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__(self, request: Request) -> Optional[str]: + authorization: str = request.headers.get("Authorization") + if not authorization: + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return authorization + + +class OAuth2PasswordBearer(OAuth2): + def __init__( + self, + tokenUrl: str, + scheme_name: Optional[str] = None, + scopes: Optional[Dict[str, str]] = None, + description: Optional[str] = None, + auto_error: bool = True, + ): + if not scopes: + scopes = {} + flows = OAuthFlowsModel(password={"tokenUrl": tokenUrl, "scopes": scopes}) + super().__init__( + flows=flows, + scheme_name=scheme_name, + description=description, + auto_error=auto_error, + ) + + async def __call__(self, request: Request) -> Optional[str]: + authorization: str = request.headers.get("Authorization") + scheme, param = get_authorization_scheme_param(authorization) + if not authorization or scheme.lower() != "bearer": + if self.auto_error: + raise HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + headers={"WWW-Authenticate": "Bearer"}, + ) + else: + return None + return param + + +class OAuth2AuthorizationCodeBearer(OAuth2): + def __init__( + self, + authorizationUrl: str, + tokenUrl: str, + refreshUrl: Optional[str] = None, + scheme_name: Optional[str] = None, + scopes: Optional[Dict[str, str]] = None, + description: Optional[str] = None, + auto_error: bool = True, + ): + if not scopes: + scopes = {} + flows = OAuthFlowsModel( + authorizationCode={ + "authorizationUrl": authorizationUrl, + "tokenUrl": tokenUrl, + "refreshUrl": refreshUrl, + "scopes": scopes, + } + ) + super().__init__( + flows=flows, + scheme_name=scheme_name, + description=description, + auto_error=auto_error, + ) + + async def __call__(self, request: Request) -> Optional[str]: + authorization: str = request.headers.get("Authorization") + scheme, param = get_authorization_scheme_param(authorization) + if not authorization or scheme.lower() != "bearer": + if self.auto_error: + raise HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + headers={"WWW-Authenticate": "Bearer"}, + ) + else: + return None # pragma: nocover + return param + + +class SecurityScopes: + def __init__(self, scopes: Optional[List[str]] = None): + self.scopes = scopes or [] + self.scope_str = " ".join(self.scopes) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/open_id_connect_url.py 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/open_id_connect_url.py new file mode 100644 index 00000000..dfe9f7b2 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/open_id_connect_url.py @@ -0,0 +1,34 @@ +from typing import Optional + +from fastapi.openapi.models import OpenIdConnect as OpenIdConnectModel +from fastapi.security.base import SecurityBase +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.status import HTTP_403_FORBIDDEN + + +class OpenIdConnect(SecurityBase): + def __init__( + self, + *, + openIdConnectUrl: str, + scheme_name: Optional[str] = None, + description: Optional[str] = None, + auto_error: bool = True + ): + self.model = OpenIdConnectModel( + openIdConnectUrl=openIdConnectUrl, description=description + ) + self.scheme_name = scheme_name or self.__class__.__name__ + self.auto_error = auto_error + + async def __call__(self, request: Request) -> Optional[str]: + authorization: str = request.headers.get("Authorization") + if not authorization: + if self.auto_error: + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" + ) + else: + return None + return authorization diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/utils.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/utils.py new file mode 100644 index 00000000..2da0dd20 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/security/utils.py @@ -0,0 +1,8 @@ +from typing import Tuple + + +def get_authorization_scheme_param(authorization_header_value: str) -> Tuple[str, str]: + if not authorization_header_value: + return "", "" + scheme, _, param = authorization_header_value.partition(" ") + return scheme, param diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/staticfiles.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/staticfiles.py new file mode 100644 index 00000000..299015d4 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/staticfiles.py @@ -0,0 +1 @@ +from starlette.staticfiles import StaticFiles as StaticFiles # noqa diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/templating.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/templating.py new file mode 100644 index 00000000..0cb86848 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/templating.py @@ -0,0 +1 @@ +from starlette.templating import Jinja2Templates as Jinja2Templates # noqa diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/testclient.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/testclient.py new file mode 100644 index 00000000..4012406a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/testclient.py @@ -0,0 +1 @@ +from starlette.testclient import TestClient as TestClient # noqa diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/types.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/types.py new file mode 100644 index 00000000..e0bca463 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/types.py @@ -0,0 +1,3 @@ +from typing import Any, Callable, TypeVar + +DecoratedCallable = TypeVar("DecoratedCallable", bound=Callable[..., Any]) diff --git 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/utils.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/utils.py new file mode 100644 index 00000000..0ced0125 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/utils.py @@ -0,0 +1,189 @@ +import functools +import re +import warnings +from dataclasses import is_dataclass +from enum import Enum +from typing import TYPE_CHECKING, Any, Dict, Optional, Set, Type, Union, cast + +import fastapi +from fastapi.datastructures import DefaultPlaceholder, DefaultType +from fastapi.openapi.constants import REF_PREFIX +from pydantic import BaseConfig, BaseModel, create_model +from pydantic.class_validators import Validator +from pydantic.fields import FieldInfo, ModelField, UndefinedType +from pydantic.schema import model_process_schema +from pydantic.utils import lenient_issubclass + +if TYPE_CHECKING: # pragma: nocover + from .routing import APIRoute + + +def is_body_allowed_for_status_code(status_code: Union[int, str, None]) -> bool: + if status_code is None: + return True + current_status_code = int(status_code) + return not (current_status_code < 200 or current_status_code in {204, 304}) + + +def get_model_definitions( + *, + flat_models: Set[Union[Type[BaseModel], Type[Enum]]], + model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str], +) -> Dict[str, Any]: + definitions: Dict[str, Dict[str, Any]] = {} + for model in flat_models: + m_schema, m_definitions, m_nested_models = model_process_schema( + model, model_name_map=model_name_map, ref_prefix=REF_PREFIX + ) + definitions.update(m_definitions) + model_name = model_name_map[model] + definitions[model_name] = m_schema + return definitions + + +def get_path_param_names(path: str) -> Set[str]: + return set(re.findall("{(.*?)}", path)) + + +def create_response_field( + name: str, + type_: Type[Any], + class_validators: Optional[Dict[str, Validator]] = None, + default: Optional[Any] = None, + required: Union[bool, UndefinedType] = True, + model_config: Type[BaseConfig] = BaseConfig, + field_info: Optional[FieldInfo] = None, + alias: Optional[str] = None, +) -> ModelField: + """ + Create a new response field. Raises if type_ is invalid. + """ + class_validators = class_validators or {} + field_info = field_info or FieldInfo() + + response_field = functools.partial( + ModelField, + name=name, + type_=type_, + class_validators=class_validators, + default=default, + required=required, + model_config=model_config, + alias=alias, + ) + + try: + return response_field(field_info=field_info) + except RuntimeError: + raise fastapi.exceptions.FastAPIError( + f"Invalid args for response field! 
Hint: check that {type_} is a valid pydantic field type" + ) + + +def create_cloned_field( + field: ModelField, + *, + cloned_types: Optional[Dict[Type[BaseModel], Type[BaseModel]]] = None, +) -> ModelField: + # _cloned_types has already cloned types, to support recursive models + if cloned_types is None: + cloned_types = {} + original_type = field.type_ + if is_dataclass(original_type) and hasattr(original_type, "__pydantic_model__"): + original_type = original_type.__pydantic_model__ + use_type = original_type + if lenient_issubclass(original_type, BaseModel): + original_type = cast(Type[BaseModel], original_type) + use_type = cloned_types.get(original_type) + if use_type is None: + use_type = create_model(original_type.__name__, __base__=original_type) + cloned_types[original_type] = use_type + for f in original_type.__fields__.values(): + use_type.__fields__[f.name] = create_cloned_field( + f, cloned_types=cloned_types + ) + new_field = create_response_field(name=field.name, type_=use_type) + new_field.has_alias = field.has_alias + new_field.alias = field.alias + new_field.class_validators = field.class_validators + new_field.default = field.default + new_field.required = field.required + new_field.model_config = field.model_config + new_field.field_info = field.field_info + new_field.allow_none = field.allow_none + new_field.validate_always = field.validate_always + if field.sub_fields: + new_field.sub_fields = [ + create_cloned_field(sub_field, cloned_types=cloned_types) + for sub_field in field.sub_fields + ] + if field.key_field: + new_field.key_field = create_cloned_field( + field.key_field, cloned_types=cloned_types + ) + new_field.validators = field.validators + new_field.pre_validators = field.pre_validators + new_field.post_validators = field.post_validators + new_field.parse_json = field.parse_json + new_field.shape = field.shape + new_field.populate_validators() + return new_field + + +def generate_operation_id_for_path( + *, name: str, path: str, method: str +) -> str: # pragma: nocover + warnings.warn( + "fastapi.utils.generate_operation_id_for_path() was deprecated, " + "it is not used internally, and will be removed soon", + DeprecationWarning, + stacklevel=2, + ) + operation_id = name + path + operation_id = re.sub(r"\W", "_", operation_id) + operation_id = operation_id + "_" + method.lower() + return operation_id + + +def generate_unique_id(route: "APIRoute") -> str: + operation_id = route.name + route.path_format + operation_id = re.sub(r"\W", "_", operation_id) + assert route.methods + operation_id = operation_id + "_" + list(route.methods)[0].lower() + return operation_id + + +def deep_dict_update(main_dict: Dict[Any, Any], update_dict: Dict[Any, Any]) -> None: + for key, value in update_dict.items(): + if ( + key in main_dict + and isinstance(main_dict[key], dict) + and isinstance(value, dict) + ): + deep_dict_update(main_dict[key], value) + elif ( + key in main_dict + and isinstance(main_dict[key], list) + and isinstance(update_dict[key], list) + ): + main_dict[key] = main_dict[key] + update_dict[key] + else: + main_dict[key] = value + + +def get_value_or_default( + first_item: Union[DefaultPlaceholder, DefaultType], + *extra_items: Union[DefaultPlaceholder, DefaultType], +) -> Union[DefaultPlaceholder, DefaultType]: + """ + Pass items or `DefaultPlaceholder`s by descending priority. + + The first one to _not_ be a `DefaultPlaceholder` will be returned. + + Otherwise, the first item (a `DefaultPlaceholder`) will be returned. 
+ """ + items = (first_item,) + extra_items + for item in items: + if not isinstance(item, DefaultPlaceholder): + return item + return first_item diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/websockets.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/websockets.py new file mode 100644 index 00000000..bed672ac --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/fastapi/websockets.py @@ -0,0 +1,2 @@ +from starlette.websockets import WebSocket as WebSocket # noqa +from starlette.websockets import WebSocketDisconnect as WebSocketDisconnect # noqa diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/LICENSE.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/LICENSE.txt new file mode 100644 index 00000000..8f080eae --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/LICENSE.txt @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 Nathaniel J. Smith and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/METADATA new file mode 100644 index 00000000..8c77b40c --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/METADATA @@ -0,0 +1,197 @@ +Metadata-Version: 2.1 +Name: h11 +Version: 0.13.0 +Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 +Home-page: https://github.com/python-hyper/h11 +Author: Nathaniel J. 
Smith +Author-email: njs@pobox.com +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: System :: Networking +Requires-Python: >=3.6 +License-File: LICENSE.txt +Requires-Dist: dataclasses ; python_version < "3.7" +Requires-Dist: typing-extensions ; python_version < "3.8" + +h11 +=== + +.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master + :target: https://travis-ci.org/python-hyper/h11 + :alt: Automated test status + +.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg + :target: https://codecov.io/gh/python-hyper/h11 + :alt: Test coverage + +.. image:: https://readthedocs.org/projects/h11/badge/?version=latest + :target: http://h11.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +This is a little HTTP/1.1 library written from scratch in Python, +heavily inspired by `hyper-h2 `_. + +It's a "bring-your-own-I/O" library; h11 contains no IO code +whatsoever. This means you can hook h11 up to your favorite network +API, and that could be anything you want: synchronous, threaded, +asynchronous, or your own implementation of `RFC 6214 +`_ -- h11 won't judge you. +(Compare this to the current state of the art, where every time a `new +network API `_ comes along then someone +gets to start over reimplementing the entire HTTP protocol from +scratch.) Cory Benfield made an `excellent blog post describing the +benefits of this approach +`_, or if you like video +then here's his `PyCon 2016 talk on the same theme +`_. + +This also means that h11 is not immediately useful out of the box: +it's a toolkit for building programs that speak HTTP, not something +that could directly replace ``requests`` or ``twisted.web`` or +whatever. But h11 makes it much easier to implement something like +``requests`` or ``twisted.web``. + +At a high level, working with h11 goes like this: + +1) First, create an ``h11.Connection`` object to track the state of a + single HTTP/1.1 connection. + +2) When you read data off the network, pass it to + ``conn.receive_data(...)``; you'll get back a list of objects + representing high-level HTTP "events". + +3) When you want to send a high-level HTTP event, create the + corresponding "event" object and pass it to ``conn.send(...)``; + this will give you back some bytes that you can then push out + through the network. + +For example, a client might instantiate and then send a +``h11.Request`` object, then zero or more ``h11.Data`` objects for the +request body (e.g., if this is a POST), and then a +``h11.EndOfMessage`` to indicate the end of the message. Then the +server would then send back a ``h11.Response``, some ``h11.Data``, and +its own ``h11.EndOfMessage``. If either side violates the protocol, +you'll get a ``h11.ProtocolError`` exception. 
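To make the walkthrough above concrete, here is a minimal client-side sketch of
that same flow (an editorial illustration, not part of the vendored package: it
assumes a reachable HTTP server at ``example.com:80`` and omits error handling):

.. code-block:: python

    import socket

    import h11

    conn = h11.Connection(our_role=h11.CLIENT)
    sock = socket.create_connection(("example.com", 80))

    # conn.send() turns event objects into raw bytes; we own all socket I/O.
    request = h11.Request(
        method="GET",
        target="/",
        headers=[("Host", "example.com"), ("Connection", "close")],
    )
    sock.sendall(conn.send(request))
    sock.sendall(conn.send(h11.EndOfMessage()))

    # Feed received bytes to conn.receive_data() and pull high-level events
    # back out with conn.next_event().
    while True:
        event = conn.next_event()
        if event is h11.NEED_DATA:
            conn.receive_data(sock.recv(4096))
        elif isinstance(event, h11.Response):
            print("status:", event.status_code)
        elif isinstance(event, h11.Data):
            print("body chunk:", bytes(event.data))
        elif isinstance(event, (h11.EndOfMessage, h11.ConnectionClosed)):
            break

    sock.close()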
+ +h11 is suitable for implementing both servers and clients, and has a +pleasantly symmetric API: the events you send as a client are exactly +the ones that you receive as a server and vice-versa. + +`Here's an example of a tiny HTTP client +`_ + +It also has `a fine manual `_. + +FAQ +--- + +*Whyyyyy?* + +I wanted to play with HTTP in `Curio +`__ and `Trio +`__, which at the time didn't have any +HTTP libraries. So I thought, no big deal, Python has, like, a dozen +different implementations of HTTP, surely I can find one that's +reusable. I didn't find one, but I did find Cory's call-to-arms +blog-post. So I figured, well, fine, if I have to implement HTTP from +scratch, at least I can make sure no-one *else* has to ever again. + +*Should I use it?* + +Maybe. You should be aware that it's a very young project. But, it's +feature complete and has an exhaustive test-suite and complete docs, +so the next step is for people to try using it and see how it goes +:-). If you do then please let us know -- if nothing else we'll want +to talk to you before making any incompatible changes! + +*What are the features/limitations?* + +Roughly speaking, it's trying to be a robust, complete, and non-hacky +implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230: +HTTP/1.1 Message Syntax and Routing +`_. That is, it mostly focuses on +implementing HTTP at the level of taking bytes on and off the wire, +and the headers related to that, and tries to be anal about spec +conformance. It doesn't know about higher-level concerns like URL +routing, conditional GETs, cross-origin cookie policies, or content +negotiation. But it does know how to take care of framing, +cross-version differences in keep-alive handling, and the "obsolete +line folding" rule, so you can focus your energies on the hard / +interesting parts for your application, and it tries to support the +full specification in the sense that any useful HTTP/1.1 conformant +application should be able to use h11. + +It's pure Python, and has no dependencies outside of the standard +library. + +It has a test suite with 100.0% coverage for both statements and +branches. + +Currently it supports Python 3 (testing on 3.6-3.9) and PyPy 3. +The last Python 2-compatible version was h11 0.11.x. +(Originally it had a Cython wrapper for `http-parser +`_ and a beautiful nested state +machine implemented with ``yield from`` to postprocess the output. But +I had to take these out -- the new *parser* needs fewer lines-of-code +than the old *parser wrapper*, is written in pure Python, uses no +exotic language syntax, and has more features. It's sad, really; that +old state machine was really slick. I just need a few sentences here +to mourn that.) + +I don't know how fast it is. I haven't benchmarked or profiled it yet, +so it's probably got a few pointless hot spots, and I've been trying +to err on the side of simplicity and robustness instead of +micro-optimization. But at the architectural level I tried hard to +avoid fundamentally bad decisions, e.g., I believe that all the +parsing algorithms remain linear-time even in the face of pathological +input like slowloris, and there are no byte-by-byte loops. (I also +believe that it maintains bounded memory usage in the face of +arbitrary/pathological input.) + +The whole library is ~800 lines-of-code. You can read and understand +the whole thing in less than an hour. 
Most of the energy invested in +this so far has been spent on trying to keep things simple by +minimizing special-cases and ad hoc state manipulation; even though it +is now quite small and simple, I'm still annoyed that I haven't +figured out how to make it even smaller and simpler. (Unfortunately, +HTTP does not lend itself to simplicity.) + +The API is ~feature complete and I don't expect the general outlines +to change much, but you can't judge an API's ergonomics until you +actually document and use it, so I'd expect some changes in the +details. + +*How do I try it?* + +.. code-block:: sh + + $ pip install h11 + $ git clone git@github.com:python-hyper/h11 + $ cd h11/examples + $ python basic-client.py + +and go from there. + +*License?* + +MIT + +*Code of conduct?* + +Contributors are requested to follow our `code of conduct +`_ in +all project spaces. + + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/RECORD new file mode 100644 index 00000000..98ad52af --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/RECORD @@ -0,0 +1,52 @@ +h11-0.13.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +h11-0.13.0.dist-info/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 +h11-0.13.0.dist-info/METADATA,sha256=Fd9foEJycn0gUB9YsXul6neMlYnEU0MRQ8IUBsSOHxE,8245 +h11-0.13.0.dist-info/RECORD,, +h11-0.13.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +h11-0.13.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 +h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507 +h11/__pycache__/__init__.cpython-37.pyc,, +h11/__pycache__/_abnf.cpython-37.pyc,, +h11/__pycache__/_connection.cpython-37.pyc,, +h11/__pycache__/_events.cpython-37.pyc,, +h11/__pycache__/_headers.cpython-37.pyc,, +h11/__pycache__/_readers.cpython-37.pyc,, +h11/__pycache__/_receivebuffer.cpython-37.pyc,, +h11/__pycache__/_state.cpython-37.pyc,, +h11/__pycache__/_util.cpython-37.pyc,, +h11/__pycache__/_version.cpython-37.pyc,, +h11/__pycache__/_writers.cpython-37.pyc,, +h11/_abnf.py,sha256=tMKqgOEkTHHp8sPd_gmU9Qowe_yXXrihct63RX2zJsg,4637 +h11/_connection.py,sha256=udHjqEO1fOcQUKa3hYIw88DMeoyG4fxiIXKjgE4DwJw,26480 +h11/_events.py,sha256=LEfuvg1AbhHaVRwxCd0I-pFn9-ezUOaoL8o2Kvy1PBA,11816 +h11/_headers.py,sha256=tRwZuFy5Wj4Yi9VVad_s7EqwCgeN6O3TIbcHd5CN_GI,10230 +h11/_readers.py,sha256=TWWoSbLVBfYGzD5dunReTd2QCxz466wjwu-4Fkzk_sQ,8370 +h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252 +h11/_state.py,sha256=F8MPHIFMJV3kUPYR3YjrjqjJ1AYp_FZ38UwGr0855lE,13184 +h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888 +h11/_version.py,sha256=ye-8iNs3P1TB71VRGlNQe2OxnAe-RupjozAMywAS5z8,686 +h11/_writers.py,sha256=7WBTXyJqFAUqqmLl5adGF8_7UVQdOVa2phL8s8csljI,5063 +h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 +h11/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +h11/tests/__pycache__/__init__.cpython-37.pyc,, +h11/tests/__pycache__/helpers.cpython-37.pyc,, +h11/tests/__pycache__/test_against_stdlib_http.cpython-37.pyc,, +h11/tests/__pycache__/test_connection.cpython-37.pyc,, +h11/tests/__pycache__/test_events.cpython-37.pyc,, +h11/tests/__pycache__/test_headers.cpython-37.pyc,, +h11/tests/__pycache__/test_helpers.cpython-37.pyc,, +h11/tests/__pycache__/test_io.cpython-37.pyc,, 
+h11/tests/__pycache__/test_receivebuffer.cpython-37.pyc,, +h11/tests/__pycache__/test_state.cpython-37.pyc,, +h11/tests/__pycache__/test_util.cpython-37.pyc,, +h11/tests/data/test-file,sha256=ZJ03Rqs98oJw29OHzJg7LlMzyGQaRAY0r3AqBeM2wVU,65 +h11/tests/helpers.py,sha256=a1EVG_p7xU4wRsa3tMPTRxuaKCmretok9sxXWvqfmQA,3355 +h11/tests/test_against_stdlib_http.py,sha256=cojCHgHXFQ8gWhNlEEwl3trmOpN-5uDukRoHnElqo3A,3995 +h11/tests/test_connection.py,sha256=ZbPLDPclKvjgjAhgk-WlCPBaf17c4XUIV2tpaW08jOI,38720 +h11/tests/test_events.py,sha256=LPVLbcV-NvPNK9fW3rraR6Bdpz1hAlsWubMtNaJ5gHg,4657 +h11/tests/test_headers.py,sha256=qd8T1Zenuz5GbD6wklSJ5G8VS7trrYgMV0jT-SMvqg8,5612 +h11/tests/test_helpers.py,sha256=kAo0CEM4LGqmyyP2ZFmhsyq3UFJqoFfAbzu3hbWreRM,794 +h11/tests/test_io.py,sha256=gXFSKpcx6n3-Ld0Y8w5kBkom1LZsCq3uHtqdotQ3S2c,16243 +h11/tests/test_receivebuffer.py,sha256=3jGbeJM36Akqg_pAhPb7XzIn2NS6RhPg-Ryg8Eu6ytk,3454 +h11/tests/test_state.py,sha256=rqll9WqFsJPE0zSrtCn9LH659mPKsDeXZ-DwXwleuBQ,8928 +h11/tests/test_util.py,sha256=ZWdRng_P-JP-cnvmcBhazBxfyWmEKBB0NLrDy5eq3h0,2970 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/WHEEL b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/top_level.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/top_level.txt new file mode 100644 index 00000000..0d24def7 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11-0.13.0.dist-info/top_level.txt @@ -0,0 +1 @@ +h11 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/__init__.py new file mode 100644 index 00000000..989e92c3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/__init__.py @@ -0,0 +1,62 @@ +# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), +# containing no networking code at all, loosely modelled on hyper-h2's generic +# implementation of HTTP/2 (and in particular the h2.connection.H2Connection +# class). There's still a bunch of subtle details you need to get right if you +# want to make this actually useful, because it doesn't implement all the +# semantics to check that what you're asking to write to the wire is sensible, +# but at least it gets you out of dealing with the wire itself. 
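(Editorial aside, not part of the vendored file: since the comment above stresses
that h11 "gets you out of dealing with the wire itself", a server-side counterpart
to the client sketch shown earlier in this diff could look like the hypothetical
helper below -- ``client_sock`` is assumed to be an already-accepted TCP socket.)

    import h11

    def handle_one_request(client_sock) -> None:
        conn = h11.Connection(our_role=h11.SERVER)
        # Read events until the client's request is complete; h11 itself never
        # touches the socket, it only consumes the bytes we hand it.
        while True:
            event = conn.next_event()
            if event is h11.NEED_DATA:
                conn.receive_data(client_sock.recv(4096))
            elif isinstance(event, h11.EndOfMessage):
                break
            elif isinstance(event, h11.ConnectionClosed):
                return
        # Send a minimal response; conn.send() converts events to bytes.
        body = b"hello"
        response = h11.Response(
            status_code=200,
            headers=[("Content-Length", str(len(body))), ("Connection", "close")],
        )
        client_sock.sendall(conn.send(response))
        client_sock.sendall(conn.send(h11.Data(data=body)))
        client_sock.sendall(conn.send(h11.EndOfMessage()))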
+ +from h11._connection import Connection, NEED_DATA, PAUSED +from h11._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from h11._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError +from h11._version import __version__ + +PRODUCT_ID = "python-h11/" + __version__ + + +__all__ = ( + "Connection", + "NEED_DATA", + "PAUSED", + "ConnectionClosed", + "Data", + "EndOfMessage", + "Event", + "InformationalResponse", + "Request", + "Response", + "CLIENT", + "CLOSED", + "DONE", + "ERROR", + "IDLE", + "MUST_CLOSE", + "SEND_BODY", + "SEND_RESPONSE", + "SERVER", + "SWITCHED_PROTOCOL", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", +) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_abnf.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_abnf.py new file mode 100644 index 00000000..e6d49e1e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_abnf.py @@ -0,0 +1,129 @@ +# We use native strings for all the re patterns, to take advantage of string +# formatting, and then convert to bytestrings when compiling the final re +# objects. + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace +# OWS = *( SP / HTAB ) +# ; optional whitespace +OWS = r"[ \t]*" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators +# token = 1*tchar +# +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" +# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" +# / DIGIT / ALPHA +# ; any VCHAR, except delimiters +token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields +# field-name = token +field_name = token + +# The standard says: +# +# field-value = *( field-content / obs-fold ) +# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] +# field-vchar = VCHAR / obs-text +# obs-fold = CRLF 1*( SP / HTAB ) +# ; obsolete line folding +# ; see Section 3.2.4 +# +# https://tools.ietf.org/html/rfc5234#appendix-B.1 +# +# VCHAR = %x21-7E +# ; visible (printing) characters +# +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string +# obs-text = %x80-FF +# +# However, the standard definition of field-content is WRONG! It disallows +# fields containing a single visible character surrounded by whitespace, +# e.g. "foo a bar". +# +# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 +# +# So our definition of field_content attempts to fix it up... +# +# Also, we allow lots of control characters, because apparently people assume +# that they're legal in practice (e.g., google analytics makes cookies with +# \x01 in them!): +# https://github.com/python-hyper/h11/issues/57 +# We still don't allow NUL or whitespace, because those are often treated as +# meta-characters and letting them through can lead to nasty issues like SSRF. +vchar = r"[\x21-\x7e]" +vchar_or_obs_text = r"[^\x00\s]" +field_vchar = vchar_or_obs_text +field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals()) + +# We handle obs-fold at a different level, and our fixed-up field_content +# already grows to swallow the whole value, so ? 
instead of *
+field_value = r"({field_content})?".format(**globals())
+
+# header-field = field-name ":" OWS field-value OWS
+header_field = (
+    r"(?P<field_name>{field_name})"
+    r":"
+    r"{OWS}"
+    r"(?P<field_value>{field_value})"
+    r"{OWS}".format(**globals())
+)
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line
+#
+# request-line = method SP request-target SP HTTP-version CRLF
+# method = token
+# HTTP-version = HTTP-name "/" DIGIT "." DIGIT
+# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive
+#
+# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full
+# URL, host+port (for connect), or even "*", but in any case we are guaranteed
+# that it consists of the visible printing characters.
+method = token
+request_target = r"{vchar}+".format(**globals())
+http_version = r"HTTP/(?P<http_version>[0-9]\.[0-9])"
+request_line = (
+    r"(?P<method>{method})"
+    r" "
+    r"(?P<target>{request_target})"
+    r" "
+    r"{http_version}".format(**globals())
+)
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line
+#
+# status-line = HTTP-version SP status-code SP reason-phrase CRLF
+# status-code = 3DIGIT
+# reason-phrase = *( HTAB / SP / VCHAR / obs-text )
+status_code = r"[0-9]{3}"
+reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals())
+status_line = (
+    r"{http_version}"
+    r" "
+    r"(?P<status_code>{status_code})"
+    # However, there are apparently a few too many servers out there that just
+    # leave out the reason phrase:
+    # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036
+    # https://github.com/seanmonstar/httparse/issues/29
+    # so make it optional. ?: is a non-capturing group.
+    r"(?: (?P<reason>{reason_phrase}))?".format(**globals())
+)
+
+HEXDIG = r"[0-9A-Fa-f]"
+# Actually
+#
+# chunk-size = 1*HEXDIG
+#
+# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20
+chunk_size = r"({HEXDIG}){{1,20}}".format(**globals())
+# Actually
+#
+# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
+#
+# but we aren't parsing the things so we don't really care.
+chunk_ext = r";.*"
+chunk_header = (
+    r"(?P<chunk_size>{chunk_size})"
+    r"(?P<chunk_ext>{chunk_ext})?"
+    r"\r\n".format(**globals())
+)
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_connection.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_connection.py
new file mode 100644
index 00000000..d11386f0
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_connection.py
@@ -0,0 +1,631 @@
+# This contains the main Connection class. Everything in h11 revolves around
+# this.
+from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Type, Union
+
+from ._events import (
+    ConnectionClosed,
+    Data,
+    EndOfMessage,
+    Event,
+    InformationalResponse,
+    Request,
+    Response,
+)
+from ._headers import get_comma_header, has_expect_100_continue, set_comma_header
+from ._readers import READERS, ReadersType
+from ._receivebuffer import ReceiveBuffer
+from ._state import (
+    _SWITCH_CONNECT,
+    _SWITCH_UPGRADE,
+    CLIENT,
+    ConnectionState,
+    DONE,
+    ERROR,
+    MIGHT_SWITCH_PROTOCOL,
+    SEND_BODY,
+    SERVER,
+    SWITCHED_PROTOCOL,
+)
+from ._util import (  # Import the internal things we need
+    LocalProtocolError,
+    RemoteProtocolError,
+    Sentinel,
+)
+from ._writers import WRITERS, WritersType
+
+# Everything in __all__ gets re-exported as part of the h11 public API.
+__all__ = ["Connection", "NEED_DATA", "PAUSED"] + + +class NEED_DATA(Sentinel, metaclass=Sentinel): + pass + + +class PAUSED(Sentinel, metaclass=Sentinel): + pass + + +# If we ever have this much buffered without it making a complete parseable +# event, we error out. The only time we really buffer is when reading the +# request/response line + headers together, so this is effectively the limit on +# the size of that. +# +# Some precedents for defaults: +# - node.js: 80 * 1024 +# - tomcat: 8 * 1024 +# - IIS: 16 * 1024 +# - Apache: <8 KiB per line> +DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 + +# RFC 7230's rules for connection lifecycles: +# - If either side says they want to close the connection, then the connection +# must close. +# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close +# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive +# (and even this is a mess -- e.g. if you're implementing a proxy then +# sending Connection: keep-alive is forbidden). +# +# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So +# our rule is: +# - If someone says Connection: close, we will close +# - If someone uses HTTP/1.0, we will close. +def _keep_alive(event: Union[Request, Response]) -> bool: + connection = get_comma_header(event.headers, b"connection") + if b"close" in connection: + return False + if getattr(event, "http_version", b"1.1") < b"1.1": + return False + return True + + +def _body_framing( + request_method: bytes, event: Union[Request, Response] +) -> Tuple[str, Union[Tuple[()], Tuple[int]]]: + # Called when we enter SEND_BODY to figure out framing information for + # this body. + # + # These are the only two events that can trigger a SEND_BODY state: + assert type(event) in (Request, Response) + # Returns one of: + # + # ("content-length", count) + # ("chunked", ()) + # ("http/1.0", ()) + # + # which are (lookup key, *args) for constructing body reader/writer + # objects. + # + # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 + # + # Step 1: some responses always have an empty body, regardless of what the + # headers say. + if type(event) is Response: + if ( + event.status_code in (204, 304) + or request_method == b"HEAD" + or (request_method == b"CONNECT" and 200 <= event.status_code < 300) + ): + return ("content-length", (0,)) + # Section 3.3.3 also lists another case -- responses with status_code + # < 200. For us these are InformationalResponses, not Responses, so + # they can't get into this function in the first place. + assert event.status_code >= 200 + + # Step 2: check for Transfer-Encoding (T-E beats C-L): + transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") + if transfer_encodings: + assert transfer_encodings == [b"chunked"] + return ("chunked", ()) + + # Step 3: check for Content-Length + content_lengths = get_comma_header(event.headers, b"content-length") + if content_lengths: + return ("content-length", (int(content_lengths[0]),)) + + # Step 4: no applicable headers; fallback/default depends on type + if type(event) is Request: + return ("content-length", (0,)) + else: + return ("http/1.0", ()) + + +################################################################ +# +# The main Connection class +# +################################################################ + + +class Connection: + """An object encapsulating the state of an HTTP connection. + + Args: + our_role: If you're implementing a client, pass :data:`h11.CLIENT`. 
If
+            you're implementing a server, pass :data:`h11.SERVER`.
+
+        max_incomplete_event_size (int):
+            The maximum number of bytes we're willing to buffer of an
+            incomplete event. In practice this mostly sets a limit on the
+            maximum size of the request/response line + headers. If this is
+            exceeded, then :meth:`next_event` will raise
+            :exc:`RemoteProtocolError`.
+
+    """
+
+    def __init__(
+        self,
+        our_role: Type[Sentinel],
+        max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE,
+    ) -> None:
+        self._max_incomplete_event_size = max_incomplete_event_size
+        # State and role tracking
+        if our_role not in (CLIENT, SERVER):
+            raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role))
+        self.our_role = our_role
+        self.their_role: Type[Sentinel]
+        if our_role is CLIENT:
+            self.their_role = SERVER
+        else:
+            self.their_role = CLIENT
+        self._cstate = ConnectionState()
+
+        # Callables for converting data->events or vice-versa given the
+        # current state
+        self._writer = self._get_io_object(self.our_role, None, WRITERS)
+        self._reader = self._get_io_object(self.their_role, None, READERS)
+
+        # Holds any unprocessed received data
+        self._receive_buffer = ReceiveBuffer()
+        # If this is true, then it indicates that the incoming connection was
+        # closed *after* the end of whatever's in self._receive_buffer:
+        self._receive_buffer_closed = False
+
+        # Extra bits of state that don't fit into the state machine.
+        #
+        # These two are only used to interpret framing headers for figuring
+        # out how to read/write response bodies. their_http_version is also
+        # made available as a convenient public API.
+        self.their_http_version: Optional[bytes] = None
+        self._request_method: Optional[bytes] = None
+        # This is pure flow-control and doesn't at all affect the set of legal
+        # transitions, so no need to bother ConnectionState with it:
+        self.client_is_waiting_for_100_continue = False
+
+    @property
+    def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]:
+        """A dictionary like::
+
+           {CLIENT: <client state>, SERVER: <server state>}
+
+        See :ref:`state-machine` for details.
+
+        """
+        return dict(self._cstate.states)
+
+    @property
+    def our_state(self) -> Type[Sentinel]:
+        """The current state of whichever role we are playing. See
+        :ref:`state-machine` for details.
+        """
+        return self._cstate.states[self.our_role]
+
+    @property
+    def their_state(self) -> Type[Sentinel]:
+        """The current state of whichever role we are NOT playing. See
+        :ref:`state-machine` for details.
+        """
+        return self._cstate.states[self.their_role]
+
+    @property
+    def they_are_waiting_for_100_continue(self) -> bool:
+        return self.their_role is CLIENT and self.client_is_waiting_for_100_continue
+
+    def start_next_cycle(self) -> None:
+        """Attempt to reset our connection state for a new request/response
+        cycle.
+
+        If both client and server are in :data:`DONE` state, then resets them
+        both to :data:`IDLE` state in preparation for a new request/response
+        cycle on this same connection. Otherwise, raises a
+        :exc:`LocalProtocolError`.
+
+        See :ref:`keepalive-and-pipelining`.
+ + """ + old_states = dict(self._cstate.states) + self._cstate.start_next_cycle() + self._request_method = None + # self.their_http_version gets left alone, since it presumably lasts + # beyond a single request/response cycle + assert not self.client_is_waiting_for_100_continue + self._respond_to_state_changes(old_states) + + def _process_error(self, role: Type[Sentinel]) -> None: + old_states = dict(self._cstate.states) + self._cstate.process_error(role) + self._respond_to_state_changes(old_states) + + def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]: + if type(event) is InformationalResponse and event.status_code == 101: + return _SWITCH_UPGRADE + if type(event) is Response: + if ( + _SWITCH_CONNECT in self._cstate.pending_switch_proposals + and 200 <= event.status_code < 300 + ): + return _SWITCH_CONNECT + return None + + # All events go through here + def _process_event(self, role: Type[Sentinel], event: Event) -> None: + # First, pass the event through the state machine to make sure it + # succeeds. + old_states = dict(self._cstate.states) + if role is CLIENT and type(event) is Request: + if event.method == b"CONNECT": + self._cstate.process_client_switch_proposal(_SWITCH_CONNECT) + if get_comma_header(event.headers, b"upgrade"): + self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE) + server_switch_event = None + if role is SERVER: + server_switch_event = self._server_switch_event(event) + self._cstate.process_event(role, type(event), server_switch_event) + + # Then perform the updates triggered by it. + + if type(event) is Request: + self._request_method = event.method + + if role is self.their_role and type(event) in ( + Request, + Response, + InformationalResponse, + ): + event = cast(Union[Request, Response, InformationalResponse], event) + self.their_http_version = event.http_version + + # Keep alive handling + # + # RFC 7230 doesn't really say what one should do if Connection: close + # shows up on a 1xx InformationalResponse. I think the idea is that + # this is not supposed to happen. In any case, if it does happen, we + # ignore it. + if type(event) in (Request, Response) and not _keep_alive( + cast(Union[Request, Response], event) + ): + self._cstate.process_keep_alive_disabled() + + # 100-continue + if type(event) is Request and has_expect_100_continue(event): + self.client_is_waiting_for_100_continue = True + if type(event) in (InformationalResponse, Response): + self.client_is_waiting_for_100_continue = False + if role is CLIENT and type(event) in (Data, EndOfMessage): + self.client_is_waiting_for_100_continue = False + + self._respond_to_state_changes(old_states, event) + + def _get_io_object( + self, + role: Type[Sentinel], + event: Optional[Event], + io_dict: Union[ReadersType, WritersType], + ) -> Optional[Callable[..., Any]]: + # event may be None; it's only used when entering SEND_BODY + state = self._cstate.states[role] + if state is SEND_BODY: + # Special case: the io_dict has a dict of reader/writer factories + # that depend on the request/response framing. + framing_type, args = _body_framing( + cast(bytes, self._request_method), cast(Union[Request, Response], event) + ) + return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index] + else: + # General case: the io_dict just has the appropriate reader/writer + # for this state + return io_dict.get((role, state)) # type: ignore + + # This must be called after any action that might have caused + # self._cstate.states to change. 
+ def _respond_to_state_changes( + self, + old_states: Dict[Type[Sentinel], Type[Sentinel]], + event: Optional[Event] = None, + ) -> None: + # Update reader/writer + if self.our_state != old_states[self.our_role]: + self._writer = self._get_io_object(self.our_role, event, WRITERS) + if self.their_state != old_states[self.their_role]: + self._reader = self._get_io_object(self.their_role, event, READERS) + + @property + def trailing_data(self) -> Tuple[bytes, bool]: + """Data that has been received, but not yet processed, represented as + a tuple with two elements, where the first is a byte-string containing + the unprocessed data itself, and the second is a bool that is True if + the receive connection was closed. + + See :ref:`switching-protocols` for discussion of why you'd want this. + """ + return (bytes(self._receive_buffer), self._receive_buffer_closed) + + def receive_data(self, data: bytes) -> None: + """Add data to our internal receive buffer. + + This does not actually do any processing on the data, just stores + it. To trigger processing, you have to call :meth:`next_event`. + + Args: + data (:term:`bytes-like object`): + The new data that was just received. + + Special case: If *data* is an empty byte-string like ``b""``, + then this indicates that the remote side has closed the + connection (end of file). Normally this is convenient, because + standard Python APIs like :meth:`file.read` or + :meth:`socket.recv` use ``b""`` to indicate end-of-file, while + other failures to read are indicated using other mechanisms + like raising :exc:`TimeoutError`. When using such an API you + can just blindly pass through whatever you get from ``read`` + to :meth:`receive_data`, and everything will work. + + But, if you have an API where reading an empty string is a + valid non-EOF condition, then you need to be aware of this and + make sure to check for such strings and avoid passing them to + :meth:`receive_data`. + + Returns: + Nothing, but after calling this you should call :meth:`next_event` + to parse the newly received data. + + Raises: + RuntimeError: + Raised if you pass an empty *data*, indicating EOF, and then + pass a non-empty *data*, indicating more data that somehow + arrived after the EOF. + + (Calling ``receive_data(b"")`` multiple times is fine, + and equivalent to calling it once.) + + """ + if data: + if self._receive_buffer_closed: + raise RuntimeError("received close, then received more data?") + self._receive_buffer += data + else: + self._receive_buffer_closed = True + + def _extract_next_receive_event(self) -> Union[Event, Type[Sentinel]]: + state = self.their_state + # We don't pause immediately when they enter DONE, because even in + # DONE state we can still process a ConnectionClosed() event. But + # if we have data in our buffer, then we definitely aren't getting + # a ConnectionClosed() immediately and we need to pause. + if state is DONE and self._receive_buffer: + return PAUSED + if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: + return PAUSED + assert self._reader is not None + event = self._reader(self._receive_buffer) + if event is None: + if not self._receive_buffer and self._receive_buffer_closed: + # In some unusual cases (basically just HTTP/1.0 bodies), EOF + # triggers an actual protocol event; in that case, we want to + # return that event, and then the state will change and we'll + # get called again to generate the actual ConnectionClosed(). 
+            if hasattr(self._reader, "read_eof"):
+                event = self._reader.read_eof()  # type: ignore[attr-defined]
+            else:
+                event = ConnectionClosed()
+        if event is None:
+            event = NEED_DATA
+        return event  # type: ignore[no-any-return]
+
+    def next_event(self) -> Union[Event, Type[Sentinel]]:
+        """Parse the next event out of our receive buffer, update our internal
+        state, and return it.
+
+        This is a mutating operation -- think of it like calling :func:`next`
+        on an iterator.
+
+        Returns:
+            : One of three things:
+
+            1) An event object -- see :ref:`events`.
+
+            2) The special constant :data:`NEED_DATA`, which indicates that
+               you need to read more data from your socket and pass it to
+               :meth:`receive_data` before this method will be able to return
+               any more events.
+
+            3) The special constant :data:`PAUSED`, which indicates that we
+               are not in a state where we can process incoming data (usually
+               because the peer has finished their part of the current
+               request/response cycle, and you have not yet called
+               :meth:`start_next_cycle`). See :ref:`flow-control` for details.
+
+        Raises:
+            RemoteProtocolError:
+                The peer has misbehaved. You should close the connection
+                (possibly after sending some kind of 4xx response).
+
+        Once this method returns :class:`ConnectionClosed` once, then all
+        subsequent calls will also return :class:`ConnectionClosed`.
+
+        If this method raises any exception besides :exc:`RemoteProtocolError`
+        then that's a bug -- if it happens please file a bug report!
+
+        If this method raises any exception then it also sets
+        :attr:`Connection.their_state` to :data:`ERROR` -- see
+        :ref:`error-handling` for discussion.
+
+        """
+
+        if self.their_state is ERROR:
+            raise RemoteProtocolError("Can't receive data when peer state is ERROR")
+        try:
+            event = self._extract_next_receive_event()
+            if event not in [NEED_DATA, PAUSED]:
+                self._process_event(self.their_role, cast(Event, event))
+            if event is NEED_DATA:
+                if len(self._receive_buffer) > self._max_incomplete_event_size:
+                    # 431 is "Request header fields too large" which is pretty
+                    # much the only situation where we can get here
+                    raise RemoteProtocolError(
+                        "Receive buffer too long", error_status_hint=431
+                    )
+                if self._receive_buffer_closed:
+                    # We're still trying to complete some event, but that's
+                    # never going to happen because no more data is coming
+                    raise RemoteProtocolError("peer unexpectedly closed connection")
+            return event
+        except BaseException as exc:
+            self._process_error(self.their_role)
+            if isinstance(exc, LocalProtocolError):
+                exc._reraise_as_remote_protocol_error()
+            else:
+                raise
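+    # Editor's note -- a hedged sketch of driving next_event() in practice
+    # (illustration only, not upstream h11 code; `conn` is an h11.Connection
+    # and `sock` is an assumed blocking socket):
+    #
+    #     while True:
+    #         event = conn.next_event()
+    #         if event is h11.NEED_DATA:
+    #             conn.receive_data(sock.recv(4096))
+    #         elif type(event) is h11.EndOfMessage:
+    #             break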
+    def send(self, event: Event) -> Optional[bytes]:
+        """Convert a high-level event into bytes that can be sent to the peer,
+        while updating our internal state machine.
+
+        Args:
+            event: The :ref:`event <events>` to send.
+
+        Returns:
+            If ``type(event) is ConnectionClosed``, then returns
+            ``None``. Otherwise, returns a :term:`bytes-like object`.
+
+        Raises:
+            LocalProtocolError:
+                Sending this event at this time would violate our
+                understanding of the HTTP/1.1 protocol.
+
+        If this method raises any exception then it also sets
+        :attr:`Connection.our_state` to :data:`ERROR` -- see
+        :ref:`error-handling` for discussion.
+
+        """
+        data_list = self.send_with_data_passthrough(event)
+        if data_list is None:
+            return None
+        else:
+            return b"".join(data_list)
+
+    def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]:
+        """Identical to :meth:`send`, except that in situations where
+        :meth:`send` returns a single :term:`bytes-like object`, this instead
+        returns a list of them -- and when sending a :class:`Data` event, this
+        list is guaranteed to contain the exact object you passed in as
+        :attr:`Data.data`. See :ref:`sendfile` for discussion.
+
+        """
+        if self.our_state is ERROR:
+            raise LocalProtocolError("Can't send data when our state is ERROR")
+        try:
+            if type(event) is Response:
+                event = self._clean_up_response_headers_for_sending(event)
+            # We want to call _process_event before calling the writer,
+            # because if someone tries to do something invalid then this will
+            # give a sensible error message, while our writers all just assume
+            # they will only receive valid events. But, _process_event might
+            # change self._writer. So we have to do a little dance:
+            writer = self._writer
+            self._process_event(self.our_role, event)
+            if type(event) is ConnectionClosed:
+                return None
+            else:
+                # In any situation where writer is None, process_event should
+                # have raised ProtocolError
+                assert writer is not None
+                data_list: List[bytes] = []
+                writer(event, data_list.append)
+                return data_list
+        except:
+            self._process_error(self.our_role)
+            raise
+
+    def send_failed(self) -> None:
+        """Notify the state machine that we failed to send the data it gave
+        us.
+
+        This causes :attr:`Connection.our_state` to immediately become
+        :data:`ERROR` -- see :ref:`error-handling` for discussion.
+
+        """
+        self._process_error(self.our_role)
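+    # Editor's note -- a hedged sketch of the send path (illustration only,
+    # not upstream h11 code):
+    #
+    #     conn = h11.Connection(our_role=h11.CLIENT)
+    #     conn.send(h11.Request(method="GET", target="/",
+    #                           headers=[("Host", "example.org")]))
+    #     # -> b'GET / HTTP/1.1\r\nHost: example.org\r\n\r\n'
+    #     conn.send(h11.EndOfMessage())  # -> b'' (zero-length body)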
+    # When sending a Response, we take responsibility for a few things:
+    #
+    # - Sometimes you MUST set Connection: close. We take care of those
+    #   times. (You can also set it yourself if you want, and if you do then
+    #   we'll respect that and close the connection at the right time. But you
+    #   don't have to worry about that unless you want to.)
+    #
+    # - The user has to set Content-Length if they want it. Otherwise, for
+    #   responses that have bodies (e.g. not HEAD), then we will automatically
+    #   select the right mechanism for streaming a body of unknown length,
+    #   which depends on the peer's HTTP version.
+    #
+    # This function's *only* responsibility is making sure headers are set up
+    # right -- everything downstream just looks at the headers. There are no
+    # side channels.
+    def _clean_up_response_headers_for_sending(self, response: Response) -> Response:
+        assert type(response) is Response
+
+        headers = response.headers
+        need_close = False
+
+        # HEAD requests need some special handling: they always act like they
+        # have Content-Length: 0, and that's how _body_framing treats
+        # them. But their headers are supposed to match what we would send if
+        # the request was a GET. (Technically there is one deviation allowed:
+        # we're allowed to leave out the framing headers -- see
+        # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as
+        # easy to get them right.)
+        method_for_choosing_headers = cast(bytes, self._request_method)
+        if method_for_choosing_headers == b"HEAD":
+            method_for_choosing_headers = b"GET"
+        framing_type, _ = _body_framing(method_for_choosing_headers, response)
+        if framing_type in ("chunked", "http/1.0"):
+            # This response has a body of unknown length.
+            # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked
+            # If our peer is HTTP/1.0, we use no framing headers, and close the
+            # connection afterwards.
+            #
+            # Make sure to clear Content-Length (in principle user could have
+            # set both and then we ignored Content-Length b/c
+            # Transfer-Encoding overwrote it -- this would be naughty of them,
+            # but the HTTP spec says that if our peer does this then we have
+            # to fix it instead of erroring out, so we'll accord the user the
+            # same respect).
+            headers = set_comma_header(headers, b"content-length", [])
+            if self.their_http_version is None or self.their_http_version < b"1.1":
+                # Either we never got a valid request and are sending back an
+                # error (their_http_version is None), so we assume the worst;
+                # or else we did get a valid HTTP/1.0 request, so we know that
+                # they don't understand chunked encoding.
+                headers = set_comma_header(headers, b"transfer-encoding", [])
+                # This is actually redundant ATM, since currently we
+                # unconditionally disable keep-alive when talking to HTTP/1.0
+                # peers. But let's be defensive just in case we add
+                # Connection: keep-alive support later:
+                if self._request_method != b"HEAD":
+                    need_close = True
+            else:
+                headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"])
+
+        if not self._cstate.keep_alive or need_close:
+            # Make sure Connection: close is set
+            connection = set(get_comma_header(headers, b"connection"))
+            connection.discard(b"keep-alive")
+            connection.add(b"close")
+            headers = set_comma_header(headers, b"connection", sorted(connection))
+
+        return Response(
+            headers=headers,
+            status_code=response.status_code,
+            http_version=response.http_version,
+            reason=response.reason,
+        )
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_events.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_events.py
new file mode 100644
index 00000000..075bf8a4
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_events.py
@@ -0,0 +1,369 @@
+# High level events that make up HTTP/1.1 conversations. Loosely inspired by
+# the corresponding events in hyper-h2:
+#
+#   http://python-hyper.org/h2/en/stable/api.html#events
+#
+# Don't subclass these. Stuff will break.
+
+import re
+from abc import ABC
+from dataclasses import dataclass, field
+from typing import Any, cast, Dict, List, Tuple, Union
+
+from ._abnf import method, request_target
+from ._headers import Headers, normalize_and_validate
+from ._util import bytesify, LocalProtocolError, validate
+
+# Everything in __all__ gets re-exported as part of the h11 public API.
+__all__ = [
+    "Event",
+    "Request",
+    "InformationalResponse",
+    "Response",
+    "Data",
+    "EndOfMessage",
+    "ConnectionClosed",
+]
+
+method_re = re.compile(method.encode("ascii"))
+request_target_re = re.compile(request_target.encode("ascii"))
+
+
+class Event(ABC):
+    """
+    Base class for h11 events.
+    """
+
+    __slots__ = ()
+
+
+@dataclass(init=False, frozen=True)
+class Request(Event):
+    """The beginning of an HTTP request.
+
+    Fields:
+
+    .. attribute:: method
+
+       An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte
+       string. :term:`Bytes-like objects <bytes-like object>` and native
+       strings containing only ascii characters will be automatically
+       converted to byte strings.
+
+    .. attribute:: target
+
+       The target of an HTTP request, e.g. ``b"/index.html"``, or one of the
+       more exotic formats described in `RFC 7230, section 5.3
+       <https://tools.ietf.org/html/rfc7230#section-5.3>`_. Always a byte
+       string. :term:`Bytes-like objects <bytes-like object>` and native
+       strings containing only ascii characters will be automatically
+       converted to byte strings.
+
+    .. attribute:: headers
+
+       Request headers, represented as a list of (name, value) pairs. See
+       :ref:`the header normalization rules <headers-format>` for details.
+
+    .. attribute:: http_version
+
+       The HTTP protocol version, represented as a byte string like
+       ``b"1.1"``. See :ref:`the HTTP version normalization rules
+       <http_version-format>` for details.
+
+    """
+
+    __slots__ = ("method", "headers", "target", "http_version")
+
+    method: bytes
+    headers: Headers
+    target: bytes
+    http_version: bytes
+
+    def __init__(
+        self,
+        *,
+        method: Union[bytes, str],
+        headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]],
+        target: Union[bytes, str],
+        http_version: Union[bytes, str] = b"1.1",
+        _parsed: bool = False,
+    ) -> None:
+        super().__init__()
+        if isinstance(headers, Headers):
+            object.__setattr__(self, "headers", headers)
+        else:
+            object.__setattr__(
+                self, "headers", normalize_and_validate(headers, _parsed=_parsed)
+            )
+        if not _parsed:
+            object.__setattr__(self, "method", bytesify(method))
+            object.__setattr__(self, "target", bytesify(target))
+            object.__setattr__(self, "http_version", bytesify(http_version))
+        else:
+            object.__setattr__(self, "method", method)
+            object.__setattr__(self, "target", target)
+            object.__setattr__(self, "http_version", http_version)
+
+        # "A server MUST respond with a 400 (Bad Request) status code to any
+        # HTTP/1.1 request message that lacks a Host header field and to any
+        # request message that contains more than one Host header field or a
+        # Host header field with an invalid field-value."
+        # -- https://tools.ietf.org/html/rfc7230#section-5.4
+        host_count = 0
+        for name, value in self.headers:
+            if name == b"host":
+                host_count += 1
+        if self.http_version == b"1.1" and host_count == 0:
+            raise LocalProtocolError("Missing mandatory Host: header")
+        if host_count > 1:
+            raise LocalProtocolError("Found multiple Host: headers")
+
+        validate(method_re, self.method, "Illegal method characters")
+        validate(request_target_re, self.target, "Illegal target characters")
+
+    # This is an unhashable type.
+    __hash__ = None  # type: ignore
+
+
+@dataclass(init=False, frozen=True)
+class _ResponseBase(Event):
+    __slots__ = ("headers", "http_version", "reason", "status_code")
+
+    headers: Headers
+    http_version: bytes
+    reason: bytes
+    status_code: int
+
+    def __init__(
+        self,
+        *,
+        headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]],
+        status_code: int,
+        http_version: Union[bytes, str] = b"1.1",
+        reason: Union[bytes, str] = b"",
+        _parsed: bool = False,
+    ) -> None:
+        super().__init__()
+        if isinstance(headers, Headers):
+            object.__setattr__(self, "headers", headers)
+        else:
+            object.__setattr__(
+                self, "headers", normalize_and_validate(headers, _parsed=_parsed)
+            )
+        if not _parsed:
+            object.__setattr__(self, "reason", bytesify(reason))
+            object.__setattr__(self, "http_version", bytesify(http_version))
+            if not isinstance(status_code, int):
+                raise LocalProtocolError("status code must be integer")
+            # Because IntEnum objects are instances of int, but aren't
+            # duck-compatible (sigh), see gh-72.
+            object.__setattr__(self, "status_code", int(status_code))
+        else:
+            object.__setattr__(self, "reason", reason)
+            object.__setattr__(self, "http_version", http_version)
+            object.__setattr__(self, "status_code", status_code)
+
+        self.__post_init__()
+
+    def __post_init__(self) -> None:
+        pass
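+    # Editor's note -- an illustrative sketch (not upstream code) of the
+    # validation above, via the concrete subclasses defined below:
+    #
+    #     Response(status_code=200, headers=[], http_version="1.0")
+    #     # -> .status_code == 200, .http_version == b"1.0", .reason == b""
+    #
+    #     Response(status_code="200", headers=[])
+    #     # -> LocalProtocolError("status code must be integer")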
+    # This is an unhashable type.
+    __hash__ = None  # type: ignore
+
+
+@dataclass(init=False, frozen=True)
+class InformationalResponse(_ResponseBase):
+    """An HTTP informational response.
+
+    Fields:
+
+    .. attribute:: status_code
+
+       The status code of this response, as an integer. For an
+       :class:`InformationalResponse`, this is always in the range [100,
+       200).
+
+    .. attribute:: headers
+
+       Response headers, represented as a list of (name, value) pairs. See
+       :ref:`the header normalization rules <headers-format>` for
+       details.
+
+    .. attribute:: http_version
+
+       The HTTP protocol version, represented as a byte string like
+       ``b"1.1"``. See :ref:`the HTTP version normalization rules
+       <http_version-format>` for details.
+
+    .. attribute:: reason
+
+       The reason phrase of this response, as a byte string. For example:
+       ``b"OK"``, or ``b"Not Found"``.
+
+    """
+
+    def __post_init__(self) -> None:
+        if not (100 <= self.status_code < 200):
+            raise LocalProtocolError(
+                "InformationalResponse status_code should be in range "
+                "[100, 200), not {}".format(self.status_code)
+            )
+
+    # This is an unhashable type.
+    __hash__ = None  # type: ignore
+
+
+@dataclass(init=False, frozen=True)
+class Response(_ResponseBase):
+    """The beginning of an HTTP response.
+
+    Fields:
+
+    .. attribute:: status_code
+
+       The status code of this response, as an integer. For an
+       :class:`Response`, this is always in the range [200,
+       1000).
+
+    .. attribute:: headers
+
+       Response headers, represented as a list of (name, value) pairs. See
+       :ref:`the header normalization rules <headers-format>` for details.
+
+    .. attribute:: http_version
+
+       The HTTP protocol version, represented as a byte string like
+       ``b"1.1"``. See :ref:`the HTTP version normalization rules
+       <http_version-format>` for details.
+
+    .. attribute:: reason
+
+       The reason phrase of this response, as a byte string. For example:
+       ``b"OK"``, or ``b"Not Found"``.
+
+    """
+
+    def __post_init__(self) -> None:
+        if not (200 <= self.status_code < 1000):
+            raise LocalProtocolError(
+                "Response status_code should be in range [200, 1000), not {}".format(
+                    self.status_code
+                )
+            )
+
+    # This is an unhashable type.
+    __hash__ = None  # type: ignore
+
+
+@dataclass(init=False, frozen=True)
+class Data(Event):
+    """Part of an HTTP message body.
+
+    Fields:
+
+    .. attribute:: data
+
+       A :term:`bytes-like object` containing part of a message body. Or, if
+       using the ``combine=False`` argument to :meth:`Connection.send`, then
+       any object that your socket writing code knows what to do with, and for
+       which calling :func:`len` returns the number of bytes that will be
+       written -- see :ref:`sendfile` for details.
+
+    .. attribute:: chunk_start
+
+       A marker that indicates whether this data object is from the start of a
+       chunked transfer encoding chunk. This field is ignored when a Data
+       event is provided to :meth:`Connection.send`: it is only valid on
+       events emitted from :meth:`Connection.next_event`. You probably
+       shouldn't use this attribute at all; see
+       :ref:`chunk-delimiters-are-bad` for details.
+
+    .. attribute:: chunk_end
+
+       A marker that indicates whether this data object is the last for a
+       given chunked transfer encoding chunk. This field is ignored when
+       a Data event is provided to :meth:`Connection.send`: it is only valid
+       on events emitted from :meth:`Connection.next_event`. You probably
+       shouldn't use this attribute at all; see
+       :ref:`chunk-delimiters-are-bad` for details.
+
+    """
+
+    __slots__ = ("data", "chunk_start", "chunk_end")
+
+    data: bytes
+    chunk_start: bool
+    chunk_end: bool
+
+    def __init__(
+        self, data: bytes, chunk_start: bool = False, chunk_end: bool = False
+    ) -> None:
+        object.__setattr__(self, "data", data)
+        object.__setattr__(self, "chunk_start", chunk_start)
+        object.__setattr__(self, "chunk_end", chunk_end)
+
+    # This is an unhashable type.
+    __hash__ = None  # type: ignore
+
+
+# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that
+# are forbidden to be sent in a trailer, since processing them as if they were
+# present in the header section might bypass external security filters."
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part
+# Unfortunately, the list of forbidden fields is long and vague :-/
+@dataclass(init=False, frozen=True)
+class EndOfMessage(Event):
+    """The end of an HTTP message.
+
+    Fields:
+
+    .. attribute:: headers
+
+       Default value: ``[]``
+
+       Any trailing headers attached to this message, represented as a list of
+       (name, value) pairs. See :ref:`the header normalization rules
+       <headers-format>` for details.
+
+       Must be empty unless ``Transfer-Encoding: chunked`` is in use.
+
+    """
+
+    __slots__ = ("headers",)
+
+    headers: Headers
+
+    def __init__(
+        self,
+        *,
+        headers: Union[
+            Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None
+        ] = None,
+        _parsed: bool = False,
+    ) -> None:
+        super().__init__()
+        if headers is None:
+            headers = Headers([])
+        elif not isinstance(headers, Headers):
+            headers = normalize_and_validate(headers, _parsed=_parsed)
+
+        object.__setattr__(self, "headers", headers)
+
+    # This is an unhashable type.
+    __hash__ = None  # type: ignore
+
+
+@dataclass(frozen=True)
+class ConnectionClosed(Event):
+    """This event indicates that the sender has closed their outgoing
+    connection.
+
+    Note that this does not necessarily mean that they can't *receive* further
+    data, because TCP connections are composed of two one-way channels which
+    can be closed independently. See :ref:`closing` for details.
+
+    No fields.
+    """
+
+    pass
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_headers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_headers.py
new file mode 100644
index 00000000..acc45969
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_headers.py
@@ -0,0 +1,278 @@
+import re
+from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union
+
+from ._abnf import field_name, field_value
+from ._util import bytesify, LocalProtocolError, validate
+
+if TYPE_CHECKING:
+    from ._events import Request
+
+try:
+    from typing import Literal
+except ImportError:
+    from typing_extensions import Literal  # type: ignore
+
+
+# Facts
+# -----
+#
+# Headers are:
+#   keys: case-insensitive ascii
+#   values: mixture of ascii and raw bytes
+#
+# "Historically, HTTP has allowed field content with text in the ISO-8859-1
+# charset [ISO-8859-1], supporting other charsets only through use of
+# [RFC2047] encoding. In practice, most HTTP header field values use only a
+# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD
+# limit their field values to US-ASCII octets. A recipient SHOULD treat other
+# octets in field content (obs-text) as opaque data."
+# And it deprecates all non-ascii values
+#
+# Leading/trailing whitespace in header names is forbidden
+#
+# Values get leading/trailing whitespace stripped
+#
+# Content-Disposition actually needs to contain unicode semantically; to
+# accomplish this it has a terrifically weird way of encoding the filename
+# itself as ascii (and even this still has lots of cross-browser
+# incompatibilities)
+#
+# Order is important:
+# "a proxy MUST NOT change the order of these field values when forwarding a
+# message"
+# (and there are several headers where the order indicates a preference)
+#
+# Multiple occurrences of the same header:
+# "A sender MUST NOT generate multiple header fields with the same field name
+# in a message unless either the entire field value for that header field is
+# defined as a comma-separated list [or the header is Set-Cookie which gets a
+# special exception]" - RFC 7230. (cookies are in RFC 6265)
+#
+# So every header aside from Set-Cookie can be merged by b", ".join if it
+# occurs repeatedly. But, of course, they can't necessarily be split by
+# .split(b","), because quoting.
+#
+# Given all this mess (case insensitive, duplicates allowed, order is
+# important, ...), there doesn't appear to be any standard way to handle
+# headers in Python -- they're almost like dicts, but... actually just
+# aren't. For now we punt and just use a super simple representation: headers
+# are a list of pairs
+#
+#   [(name1, value1), (name2, value2), ...]
+#
+# where all entries are bytestrings, names are lowercase and have no
+# leading/trailing whitespace, and values are bytestrings with no
+# leading/trailing whitespace. Searching and updating are done via naive O(n)
+# methods.
+#
+# Maybe a dict-of-lists would be better?
+
+_content_length_re = re.compile(br"[0-9]+")
+_field_name_re = re.compile(field_name.encode("ascii"))
+_field_value_re = re.compile(field_value.encode("ascii"))
+
+
+class Headers(Sequence[Tuple[bytes, bytes]]):
+    """
+    A list-like interface that allows iterating over headers as byte-pairs
+    of (lowercased-name, value).
+
+    Internally we actually store the representation as three-tuples,
+    including both the raw original casing, in order to preserve casing
+    over-the-wire, and the lowercased name, for case-insensitive comparisons.
+
+    r = Request(
+        method="GET",
+        target="/",
+        headers=[("Host", "example.org"), ("Connection", "keep-alive")],
+        http_version="1.1",
+    )
+    assert r.headers == [
+        (b"host", b"example.org"),
+        (b"connection", b"keep-alive")
+    ]
+    assert r.headers.raw_items() == [
+        (b"Host", b"example.org"),
+        (b"Connection", b"keep-alive")
+    ]
+    """
+
+    __slots__ = "_full_items"
+
+    def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None:
+        self._full_items = full_items
+
+    def __bool__(self) -> bool:
+        return bool(self._full_items)
+
+    def __eq__(self, other: object) -> bool:
+        return list(self) == list(other)  # type: ignore
+
+    def __len__(self) -> int:
+        return len(self._full_items)
+
+    def __repr__(self) -> str:
+        return "<Headers(%s)>" % repr(list(self))
+
+    def __getitem__(self, idx: int) -> Tuple[bytes, bytes]:  # type: ignore[override]
+        _, name, value = self._full_items[idx]
+        return (name, value)
+
+    def raw_items(self) -> List[Tuple[bytes, bytes]]:
+        return [(raw_name, value) for raw_name, _, value in self._full_items]
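+# Editor's note -- a hedged sketch of the helpers below (illustration only,
+# not upstream code):
+#
+#     h = normalize_and_validate([("Host", "example.org"),
+#                                 ("Connection", "keep-alive, Upgrade")])
+#     assert list(h) == [(b"host", b"example.org"),
+#                        (b"connection", b"keep-alive, Upgrade")]
+#     assert get_comma_header(h, b"connection") == [b"keep-alive", b"upgrade"]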
+
+HeaderTypes = Union[
+    List[Tuple[bytes, bytes]],
+    List[Tuple[bytes, str]],
+    List[Tuple[str, bytes]],
+    List[Tuple[str, str]],
+]
+
+
+@overload
+def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers:
+    ...
+
+
+@overload
+def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers:
+    ...
+
+
+@overload
+def normalize_and_validate(
+    headers: Union[Headers, HeaderTypes], _parsed: bool = False
+) -> Headers:
+    ...
+
+
+def normalize_and_validate(
+    headers: Union[Headers, HeaderTypes], _parsed: bool = False
+) -> Headers:
+    new_headers = []
+    seen_content_length = None
+    saw_transfer_encoding = False
+    for name, value in headers:
+        # For headers coming out of the parser, we can safely skip some steps,
+        # because it always returns bytes and has already run these regexes
+        # over the data:
+        if not _parsed:
+            name = bytesify(name)
+            value = bytesify(value)
+            validate(_field_name_re, name, "Illegal header name {!r}", name)
+            validate(_field_value_re, value, "Illegal header value {!r}", value)
+        assert isinstance(name, bytes)
+        assert isinstance(value, bytes)
+
+        raw_name = name
+        name = name.lower()
+        if name == b"content-length":
+            lengths = {length.strip() for length in value.split(b",")}
+            if len(lengths) != 1:
+                raise LocalProtocolError("conflicting Content-Length headers")
+            value = lengths.pop()
+            validate(_content_length_re, value, "bad Content-Length")
+            if seen_content_length is None:
+                seen_content_length = value
+                new_headers.append((raw_name, name, value))
+            elif seen_content_length != value:
+                raise LocalProtocolError("conflicting Content-Length headers")
+        elif name == b"transfer-encoding":
+            # "A server that receives a request message with a transfer coding
+            # it does not understand SHOULD respond with 501 (Not
+            # Implemented)."
+            # https://tools.ietf.org/html/rfc7230#section-3.3.1
+            if saw_transfer_encoding:
+                raise LocalProtocolError(
+                    "multiple Transfer-Encoding headers", error_status_hint=501
+                )
+            # "All transfer-coding names are case-insensitive"
+            # -- https://tools.ietf.org/html/rfc7230#section-4
+            value = value.lower()
+            if value != b"chunked":
+                raise LocalProtocolError(
+                    "Only Transfer-Encoding: chunked is supported",
+                    error_status_hint=501,
+                )
+            saw_transfer_encoding = True
+            new_headers.append((raw_name, name, value))
+        else:
+            new_headers.append((raw_name, name, value))
+    return Headers(new_headers)
+
+
+def get_comma_header(headers: Headers, name: bytes) -> List[bytes]:
+    # Should only be used for headers whose value is a list of
+    # comma-separated, case-insensitive values.
+    #
+    # The header name `name` is expected to be lower-case bytes.
+    #
+    # Connection: meets these criteria (including case insensitivity).
+    #
+    # Content-Length: technically is just a single value (1*DIGIT), but the
+    # standard makes reference to implementations that do multiple values, and
+    # using this doesn't hurt. Ditto, case insensitivity doesn't matter either
+    # way.
+    #
+    # Transfer-Encoding: is more complex (allows for quoted strings), so
+    # splitting on , is actually wrong. For example, this is legal:
+    #
+    #    Transfer-Encoding: foo; options="1,2", chunked
+    #
+    # and should be parsed as
+    #
+    #    foo; options="1,2"
+    #    chunked
+    #
+    # but this naive function will parse it as
+    #
+    #    foo; options="1
+    #    2"
+    #    chunked
+    #
+    # However, this is okay because the only thing we are going to do with
+    # any Transfer-Encoding is reject ones that aren't just "chunked", so
+    # both of these will be treated the same anyway.
+    #
+    # Expect: the only legal value is the literal string
+    # "100-continue". Splitting on commas is harmless. Case insensitive.
+    #
+    out: List[bytes] = []
+    for _, found_name, found_raw_value in headers._full_items:
+        if found_name == name:
+            found_raw_value = found_raw_value.lower()
+            for found_split_value in found_raw_value.split(b","):
+                found_split_value = found_split_value.strip()
+                if found_split_value:
+                    out.append(found_split_value)
+    return out
+
+
+def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers:
+    # The header name `name` is expected to be lower-case bytes.
+    #
+    # Note that when we store the header we use title casing for the header
+    # names, in order to match the conventional HTTP header style.
+    #
+    # Simply calling `.title()` is a blunt approach, but it's correct
+    # here given the cases where we're using `set_comma_header`...
+    #
+    # Connection, Content-Length, Transfer-Encoding.
+    new_headers: List[Tuple[bytes, bytes]] = []
+    for found_raw_name, found_name, found_raw_value in headers._full_items:
+        if found_name != name:
+            new_headers.append((found_raw_name, found_raw_value))
+    for new_value in new_values:
+        new_headers.append((name.title(), new_value))
+    return normalize_and_validate(new_headers)
+
+
+def has_expect_100_continue(request: "Request") -> bool:
+    # https://tools.ietf.org/html/rfc7231#section-5.1.1
+    # "A server that receives a 100-continue expectation in an HTTP/1.0 request
+    # MUST ignore that expectation."
+ if request.http_version < b"1.1": + return False + expect = get_comma_header(request.headers, b"expect") + return b"100-continue" in expect diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_readers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_readers.py new file mode 100644 index 00000000..a036d790 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_readers.py @@ -0,0 +1,249 @@ +# Code to read HTTP data +# +# Strategy: each reader is a callable which takes a ReceiveBuffer object, and +# either: +# 1) consumes some of it and returns an Event +# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate() +# and it might raise a LocalProtocolError, so simpler just to always use +# this) +# 3) returns None, meaning "I need more data" +# +# If they have a .read_eof attribute, then this will be called if an EOF is +# received -- but this is optional. Either way, the actual ConnectionClosed +# event will be generated afterwards. +# +# READERS is a dict describing how to pick a reader. It maps states to either: +# - a reader +# - or, for body readers, a dict of per-framing reader factories + +import re +from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union + +from ._abnf import chunk_header, header_field, request_line, status_line +from ._events import Data, EndOfMessage, InformationalResponse, Request, Response +from ._receivebuffer import ReceiveBuffer +from ._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, +) +from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate + +__all__ = ["READERS"] + +header_field_re = re.compile(header_field.encode("ascii")) + +# Remember that this has to run in O(n) time -- so e.g. the bytearray cast is +# critical. 
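+# Editor's note -- a hedged sketch of the reader contract described above
+# (illustration only; uses names defined later in this module):
+#
+#     buf = ReceiveBuffer()
+#     buf += b"HTTP/1.1 200 OK\r\n\r\n"
+#     maybe_read_from_SEND_RESPONSE_server(buf)  # -> Response(status_code=200)
+#     maybe_read_from_SEND_RESPONSE_server(buf)  # -> None: need more data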
+obs_fold_re = re.compile(br"[ \t]+") + + +def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]: + it = iter(lines) + last: Optional[bytes] = None + for line in it: + match = obs_fold_re.match(line) + if match: + if last is None: + raise LocalProtocolError("continuation line at start of headers") + if not isinstance(last, bytearray): + last = bytearray(last) + last += b" " + last += line[match.end() :] + else: + if last is not None: + yield last + last = line + if last is not None: + yield last + + +def _decode_header_lines( + lines: Iterable[bytes], +) -> Iterable[Tuple[bytes, bytes]]: + for line in _obsolete_line_fold(lines): + matches = validate(header_field_re, line, "illegal header line: {!r}", line) + yield (matches["field_name"], matches["field_value"]) + + +request_line_re = re.compile(request_line.encode("ascii")) + + +def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no request line received") + matches = validate( + request_line_re, lines[0], "illegal request line: {!r}", lines[0] + ) + return Request( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + +status_line_re = re.compile(status_line.encode("ascii")) + + +def maybe_read_from_SEND_RESPONSE_server( + buf: ReceiveBuffer, +) -> Union[InformationalResponse, Response, None]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no response line received") + matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) + http_version = ( + b"1.1" if matches["http_version"] is None else matches["http_version"] + ) + reason = b"" if matches["reason"] is None else matches["reason"] + status_code = int(matches["status_code"]) + class_: Union[Type[InformationalResponse], Type[Response]] = ( + InformationalResponse if status_code < 200 else Response + ) + return class_( + headers=list(_decode_header_lines(lines[1:])), + _parsed=True, + status_code=status_code, + reason=reason, + http_version=http_version, + ) + + +class ContentLengthReader: + def __init__(self, length: int) -> None: + self._length = length + self._remaining = length + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._remaining == 0: + return EndOfMessage() + data = buf.maybe_extract_at_most(self._remaining) + if data is None: + return None + self._remaining -= len(data) + return Data(data=data) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(received {} bytes, expected {})".format( + self._length - self._remaining, self._length + ) + ) + + +chunk_header_re = re.compile(chunk_header.encode("ascii")) + + +class ChunkedReader: + def __init__(self) -> None: + self._bytes_in_chunk = 0 + # After reading a chunk, we have to throw away the trailing \r\n; if + # this is >0 then we discard that many bytes before resuming regular + # de-chunkification. 
+ self._bytes_to_discard = 0 + self._reading_trailer = False + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._reading_trailer: + lines = buf.maybe_extract_lines() + if lines is None: + return None + return EndOfMessage(headers=list(_decode_header_lines(lines))) + if self._bytes_to_discard > 0: + data = buf.maybe_extract_at_most(self._bytes_to_discard) + if data is None: + return None + self._bytes_to_discard -= len(data) + if self._bytes_to_discard > 0: + return None + # else, fall through and read some more + assert self._bytes_to_discard == 0 + if self._bytes_in_chunk == 0: + # We need to refill our chunk count + chunk_header = buf.maybe_extract_next_line() + if chunk_header is None: + return None + matches = validate( + chunk_header_re, + chunk_header, + "illegal chunk header: {!r}", + chunk_header, + ) + # XX FIXME: we discard chunk extensions. Does anyone care? + self._bytes_in_chunk = int(matches["chunk_size"], base=16) + if self._bytes_in_chunk == 0: + self._reading_trailer = True + return self(buf) + chunk_start = True + else: + chunk_start = False + assert self._bytes_in_chunk > 0 + data = buf.maybe_extract_at_most(self._bytes_in_chunk) + if data is None: + return None + self._bytes_in_chunk -= len(data) + if self._bytes_in_chunk == 0: + self._bytes_to_discard = 2 + chunk_end = True + else: + chunk_end = False + return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(incomplete chunked read)" + ) + + +class Http10Reader: + def __call__(self, buf: ReceiveBuffer) -> Optional[Data]: + data = buf.maybe_extract_at_most(999999999) + if data is None: + return None + return Data(data=data) + + def read_eof(self) -> EndOfMessage: + return EndOfMessage() + + +def expect_nothing(buf: ReceiveBuffer) -> None: + if buf: + raise LocalProtocolError("Got data when expecting EOF") + return None + + +ReadersType = Dict[ + Union[Sentinel, Tuple[Sentinel, Sentinel]], + Union[Callable[..., Any], Dict[str, Callable[..., Any]]], +] + +READERS: ReadersType = { + (CLIENT, IDLE): maybe_read_from_IDLE_client, + (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, + (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, + (CLIENT, DONE): expect_nothing, + (CLIENT, MUST_CLOSE): expect_nothing, + (CLIENT, CLOSED): expect_nothing, + (SERVER, DONE): expect_nothing, + (SERVER, MUST_CLOSE): expect_nothing, + (SERVER, CLOSED): expect_nothing, + SEND_BODY: { + "chunked": ChunkedReader, + "content-length": ContentLengthReader, + "http/1.0": Http10Reader, + }, +} diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_receivebuffer.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_receivebuffer.py new file mode 100644 index 00000000..e5c4e08a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_receivebuffer.py @@ -0,0 +1,153 @@ +import re +import sys +from typing import List, Optional, Union + +__all__ = ["ReceiveBuffer"] + + +# Operations we want to support: +# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable), +# or wait until there is one +# - read at-most-N bytes +# Goals: +# - on average, do this fast +# - worst case, do this in O(n) where n is the number of bytes processed +# Plan: +# - store bytearray, offset, how far we've searched for a separator token +# - use the how-far-we've-searched data to avoid rescanning +# - while doing a 
stream of uninterrupted processing, advance offset instead
+#   of constantly copying
+# WARNING:
+# - I haven't benchmarked or profiled any of this yet.
+#
+# Note that starting in Python 3.4, deleting the initial n bytes from a
+# bytearray is amortized O(n), thanks to some excellent work by Antoine
+# Martin:
+#
+#     https://bugs.python.org/issue19087
+#
+# This means that if we only supported 3.4+, we could get rid of the code here
+# involving self._start and self.compress, because it's doing exactly the same
+# thing that bytearray now does internally.
+#
+# BUT unfortunately, we still support 2.7, and reading short segments out of a
+# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually
+# delete this code. Yet:
+#
+#     https://pythonclock.org/
+#
+# (Two things to double-check first though: make sure PyPy also has the
+# optimization, and benchmark to make sure it's a win, since we do have a
+# slightly clever thing where we delay calling compress() until we've
+# processed a whole event, which could in theory be slightly more efficient
+# than the internal bytearray support.)
+blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE)
+
+
+class ReceiveBuffer:
+    def __init__(self) -> None:
+        self._data = bytearray()
+        self._next_line_search = 0
+        self._multiple_lines_search = 0
+
+    def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer":
+        self._data += byteslike
+        return self
+
+    def __bool__(self) -> bool:
+        return bool(len(self))
+
+    def __len__(self) -> int:
+        return len(self._data)
+
+    # for @property unprocessed_data
+    def __bytes__(self) -> bytes:
+        return bytes(self._data)
+
+    def _extract(self, count: int) -> bytearray:
+        # Extract an initial slice of the data buffer and return it.
+        out = self._data[:count]
+        del self._data[:count]
+
+        self._next_line_search = 0
+        self._multiple_lines_search = 0
+
+        return out
+
+    def maybe_extract_at_most(self, count: int) -> Optional[bytearray]:
+        """
+        Extract a fixed number of bytes from the buffer.
+        """
+        out = self._data[:count]
+        if not out:
+            return None
+
+        return self._extract(count)
+
+    def maybe_extract_next_line(self) -> Optional[bytearray]:
+        """
+        Extract the first line, if it is completed in the buffer.
+        """
+        # Only search in buffer space that we've not already looked at.
+        search_start_index = max(0, self._next_line_search - 1)
+        partial_idx = self._data.find(b"\r\n", search_start_index)
+
+        if partial_idx == -1:
+            self._next_line_search = len(self._data)
+            return None
+
+        # + 2 is to compensate for len(b"\r\n")
+        idx = partial_idx + 2
+
+        return self._extract(idx)
+
+    def maybe_extract_lines(self) -> Optional[List[bytearray]]:
+        """
+        Extract everything up to the first blank line, and return a list of lines.
+        """
+        # Handle the case where we have an immediate empty line.
+        if self._data[:1] == b"\n":
+            self._extract(1)
+            return []
+
+        if self._data[:2] == b"\r\n":
+            self._extract(2)
+            return []
+
+        # Only search in buffer space that we've not already looked at.
+        match = blank_line_regex.search(self._data, self._multiple_lines_search)
+        if match is None:
+            self._multiple_lines_search = max(0, len(self._data) - 2)
+            return None
+
+        # Truncate the buffer and return it.
+        idx = match.span(0)[-1]
+        out = self._extract(idx)
+        lines = out.split(b"\n")
+
+        for line in lines:
+            if line.endswith(b"\r"):
+                del line[-1]
+
+        assert lines[-2] == lines[-1] == b""
+
+        del lines[-2:]
+
+        return lines
+
+    # In theory we should wait until `\r\n` before starting to validate
+    # incoming data.
However it's interesting to detect (very) invalid data + # early given they might not even contain `\r\n` at all (hence only + # timeout will get rid of them). + # This is not a 100% effective detection but more of a cheap sanity check + # allowing for early abort in some useful cases. + # This is especially interesting when peer is messing up with HTTPS and + # sent us a TLS stream where we were expecting plain HTTP given all + # versions of TLS so far start handshake with a 0x16 message type code. + def is_next_line_obviously_invalid_request_line(self) -> bool: + try: + # HTTP header line must not contain non-printable characters + # and should not start with a space + return self._data[0] < 0x21 + except IndexError: + return False diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_state.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_state.py new file mode 100644 index 00000000..27907688 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_state.py @@ -0,0 +1,363 @@ +################################################################ +# The core state machine +################################################################ +# +# Rule 1: everything that affects the state machine and state transitions must +# live here in this file. As much as possible goes into the table-based +# representation, but for the bits that don't quite fit, the actual code and +# state must nonetheless live here. +# +# Rule 2: this file does not know about what role we're playing; it only knows +# about HTTP request/response cycles in the abstract. This ensures that we +# don't cheat and apply different rules to local and remote parties. +# +# +# Theory of operation +# =================== +# +# Possibly the simplest way to think about this is that we actually have 5 +# different state machines here. Yes, 5. These are: +# +# 1) The client state, with its complicated automaton (see the docs) +# 2) The server state, with its complicated automaton (see the docs) +# 3) The keep-alive state, with possible states {True, False} +# 4) The SWITCH_CONNECT state, with possible states {False, True} +# 5) The SWITCH_UPGRADE state, with possible states {False, True} +# +# For (3)-(5), the first state listed is the initial state. +# +# (1)-(3) are stored explicitly in member variables. The last +# two are stored implicitly in the pending_switch_proposals set as: +# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals) +# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals) +# +# And each of these machines has two different kinds of transitions: +# +# a) Event-triggered +# b) State-triggered +# +# Event triggered is the obvious thing that you'd think it is: some event +# happens, and if it's the right event at the right time then a transition +# happens. But there are somewhat complicated rules for which machines can +# "see" which events. (As a rule of thumb, if a machine "sees" an event, this +# means two things: the event can affect the machine, and if the machine is +# not in a state where it expects that event then it's an error.) These rules +# are: +# +# 1) The client machine sees all h11.events objects emitted by the client. +# +# 2) The server machine sees all h11.events objects emitted by the server. +# +# It also sees the client's Request event. +# +# And sometimes, server events are annotated with a _SWITCH_* event. 
For +# example, we can have a (Response, _SWITCH_CONNECT) event, which is +# different from a regular Response event. +# +# 3) The keep-alive machine sees the process_keep_alive_disabled() event +# (which is derived from Request/Response events), and this event +# transitions it from True -> False, or from False -> False. There's no way +# to transition back. +# +# 4&5) The _SWITCH_* machines transition from False->True when we get a +# Request that proposes the relevant type of switch (via +# process_client_switch_proposals), and they go from True->False when we +# get a Response that has no _SWITCH_* annotation. +# +# So that's event-triggered transitions. +# +# State-triggered transitions are less standard. What they do here is couple +# the machines together. The way this works is, when certain *joint* +# configurations of states are achieved, then we automatically transition to a +# new *joint* state. So, for example, if we're ever in a joint state with +# +# client: DONE +# keep-alive: False +# +# then the client state immediately transitions to: +# +# client: MUST_CLOSE +# +# This is fundamentally different from an event-based transition, because it +# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state +# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive +# transitioned True -> False. Either way, once this precondition is satisfied, +# this transition is immediately triggered. +# +# What if two conflicting state-based transitions get enabled at the same +# time? In practice there's only one case where this arises (client DONE -> +# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by +# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition. +# +# Implementation +# -------------- +# +# The event-triggered transitions for the server and client machines are all +# stored explicitly in a table. Ditto for the state-triggered transitions that +# involve just the server and client state. +# +# The transitions for the other machines, and the state-triggered transitions +# that involve the other machines, are written out as explicit Python code. +# +# It'd be nice if there were some cleaner way to do all this. This isn't +# *too* terrible, but I feel like it could probably be better. +# +# WARNING +# ------- +# +# The script that generates the state machine diagrams for the docs knows how +# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS +# tables. But it can't automatically read the transitions that are written +# directly in Python code. So if you touch those, you need to also update the +# script to keep it in sync! +from typing import cast, Dict, Optional, Set, Tuple, Type, Union + +from ._events import * +from ._util import LocalProtocolError, Sentinel + +# Everything in __all__ gets re-exported as part of the h11 public API. 
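+# Editor's note -- a hedged sketch (illustration only) of the machinery
+# defined below:
+#
+#     cs = ConnectionState()
+#     cs.process_event(CLIENT, Request)        # also fires (Request, CLIENT)
+#                                              # on the SERVER machine
+#     cs.process_event(CLIENT, EndOfMessage)
+#     assert cs.states == {CLIENT: DONE, SERVER: SEND_RESPONSE}
+#     cs.process_keep_alive_disabled()         # DONE -> MUST_CLOSE fires
+#     assert cs.states[CLIENT] is MUST_CLOSE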
+__all__ = [ + "CLIENT", + "SERVER", + "IDLE", + "SEND_RESPONSE", + "SEND_BODY", + "DONE", + "MUST_CLOSE", + "CLOSED", + "MIGHT_SWITCH_PROTOCOL", + "SWITCHED_PROTOCOL", + "ERROR", +] + + +class CLIENT(Sentinel, metaclass=Sentinel): + pass + + +class SERVER(Sentinel, metaclass=Sentinel): + pass + + +# States +class IDLE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_RESPONSE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_BODY(Sentinel, metaclass=Sentinel): + pass + + +class DONE(Sentinel, metaclass=Sentinel): + pass + + +class MUST_CLOSE(Sentinel, metaclass=Sentinel): + pass + + +class CLOSED(Sentinel, metaclass=Sentinel): + pass + + +class ERROR(Sentinel, metaclass=Sentinel): + pass + + +# Switch types +class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel): + pass + + +EventTransitionType = Dict[ + Type[Sentinel], + Dict[ + Type[Sentinel], + Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]], + ], +] + +EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = { + CLIENT: { + IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED}, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + MIGHT_SWITCH_PROTOCOL: {}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, + SERVER: { + IDLE: { + ConnectionClosed: CLOSED, + Response: SEND_BODY, + # Special case: server sees client Request events, in this form + (Request, CLIENT): SEND_RESPONSE, + }, + SEND_RESPONSE: { + InformationalResponse: SEND_RESPONSE, + Response: SEND_BODY, + (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL, + (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL, + }, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, +} + +# NB: there are also some special-case state-triggered transitions hard-coded +# into _fire_state_triggered_transitions below. +STATE_TRIGGERED_TRANSITIONS = { + # (Client state, Server state) -> new states + # Protocol negotiation + (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL}, + # Socket shutdown + (CLOSED, DONE): {SERVER: MUST_CLOSE}, + (CLOSED, IDLE): {SERVER: MUST_CLOSE}, + (ERROR, DONE): {SERVER: MUST_CLOSE}, + (DONE, CLOSED): {CLIENT: MUST_CLOSE}, + (IDLE, CLOSED): {CLIENT: MUST_CLOSE}, + (DONE, ERROR): {CLIENT: MUST_CLOSE}, +} + + +class ConnectionState: + def __init__(self) -> None: + # Extra bits of state that don't quite fit into the state model. + + # If this is False then it enables the automatic DONE -> MUST_CLOSE + # transition. Don't set this directly; call .keep_alive_disabled() + self.keep_alive = True + + # This is a subset of {UPGRADE, CONNECT}, containing the proposals + # made by the client for switching protocols. 
+ self.pending_switch_proposals: Set[Type[Sentinel]] = set() + + self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE} + + def process_error(self, role: Type[Sentinel]) -> None: + self.states[role] = ERROR + self._fire_state_triggered_transitions() + + def process_keep_alive_disabled(self) -> None: + self.keep_alive = False + self._fire_state_triggered_transitions() + + def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None: + self.pending_switch_proposals.add(switch_event) + self._fire_state_triggered_transitions() + + def process_event( + self, + role: Type[Sentinel], + event_type: Type[Event], + server_switch_event: Optional[Type[Sentinel]] = None, + ) -> None: + _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type + if server_switch_event is not None: + assert role is SERVER + if server_switch_event not in self.pending_switch_proposals: + raise LocalProtocolError( + "Received server {} event without a pending proposal".format( + server_switch_event + ) + ) + _event_type = (event_type, server_switch_event) + if server_switch_event is None and _event_type is Response: + self.pending_switch_proposals = set() + self._fire_event_triggered_transitions(role, _event_type) + # Special case: the server state does get to see Request + # events. + if _event_type is Request: + assert role is CLIENT + self._fire_event_triggered_transitions(SERVER, (Request, CLIENT)) + self._fire_state_triggered_transitions() + + def _fire_event_triggered_transitions( + self, + role: Type[Sentinel], + event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], + ) -> None: + state = self.states[role] + try: + new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type] + except KeyError: + event_type = cast(Type[Event], event_type) + raise LocalProtocolError( + "can't handle event type {} when role={} and state={}".format( + event_type.__name__, role, self.states[role] + ) + ) from None + self.states[role] = new_state + + def _fire_state_triggered_transitions(self) -> None: + # We apply these rules repeatedly until converging on a fixed point + while True: + start_states = dict(self.states) + + # It could happen that both these special-case transitions are + # enabled at the same time: + # + # DONE -> MIGHT_SWITCH_PROTOCOL + # DONE -> MUST_CLOSE + # + # For example, this will always be true of a HTTP/1.0 client + # requesting CONNECT. If this happens, the protocol switch takes + # priority. From there the client will either go to + # SWITCHED_PROTOCOL, in which case it's none of our business when + # they close the connection, or else the server will deny the + # request, in which case the client will go back to DONE and then + # from there to MUST_CLOSE. + if self.pending_switch_proposals: + if self.states[CLIENT] is DONE: + self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL + + if not self.pending_switch_proposals: + if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL: + self.states[CLIENT] = DONE + + if not self.keep_alive: + for role in (CLIENT, SERVER): + if self.states[role] is DONE: + self.states[role] = MUST_CLOSE + + # Tabular state-triggered transitions + joint_state = (self.states[CLIENT], self.states[SERVER]) + changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {}) + self.states.update(changes) # type: ignore + + if self.states == start_states: + # Fixed point reached + return + + def start_next_cycle(self) -> None: + if self.states != {CLIENT: DONE, SERVER: DONE}: + raise LocalProtocolError( + "not in a reusable state. 
self.states={}".format(self.states)
+            )
+        # Can't reach DONE/DONE with any of these active, but still, let's be
+        # sure.
+        assert self.keep_alive
+        assert not self.pending_switch_proposals
+        self.states = {CLIENT: IDLE, SERVER: IDLE}
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_util.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_util.py
new file mode 100644
index 00000000..67184452
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_util.py
@@ -0,0 +1,135 @@
+from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union
+
+__all__ = [
+    "ProtocolError",
+    "LocalProtocolError",
+    "RemoteProtocolError",
+    "validate",
+    "bytesify",
+]
+
+
+class ProtocolError(Exception):
+    """Exception indicating a violation of the HTTP/1.1 protocol.
+
+    This is an abstract base class, with two concrete subclasses:
+    :exc:`LocalProtocolError`, which indicates that you tried to do something
+    that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which
+    indicates that the remote peer tried to do something that HTTP/1.1 says is
+    illegal. See :ref:`error-handling` for details.
+
+    In addition to the normal :exc:`Exception` features, it has one attribute:
+
+    .. attribute:: error_status_hint
+
+       This gives a suggestion as to what status code a server might use if
+       this error occurred as part of a request.
+
+       For a :exc:`RemoteProtocolError`, this is useful as a suggestion for
+       how you might want to respond to a misbehaving peer, if you're
+       implementing a server.
+
+       For a :exc:`LocalProtocolError`, this can be taken as a suggestion for
+       how your peer might have responded to *you* if h11 had allowed you to
+       continue.
+
+       The default is 400 Bad Request, a generic catch-all for protocol
+       violations.
+
+    """
+
+    def __init__(self, msg: str, error_status_hint: int = 400) -> None:
+        if type(self) is ProtocolError:
+            raise TypeError("tried to directly instantiate ProtocolError")
+        Exception.__init__(self, msg)
+        self.error_status_hint = error_status_hint
+
+
+# Strategy: there are a number of public APIs where a LocalProtocolError can
+# be raised (send(), all the different event constructors, ...), and only one
+# public API where RemoteProtocolError can be raised
+# (receive_data()). Therefore we always raise LocalProtocolError internally,
+# and then receive_data will translate this into a RemoteProtocolError.
+#
+# Internally:
+#   LocalProtocolError is the generic "ProtocolError".
+# Externally:
+#   LocalProtocolError is for local errors and RemoteProtocolError is for
+#   remote errors.
+class LocalProtocolError(ProtocolError):
+    def _reraise_as_remote_protocol_error(self) -> NoReturn:
+        # After catching a LocalProtocolError, use this method to re-raise it
+        # as a RemoteProtocolError. This method must be called from inside an
+        # except: block.
+        #
+        # An easy way to get an equivalent RemoteProtocolError is just to
+        # modify 'self' in place.
+        self.__class__ = RemoteProtocolError  # type: ignore
+        # But the re-raising is somewhat non-trivial -- you might think that
+        # now that we've modified the in-flight exception object, that just
+        # doing 'raise' to re-raise it would be enough. But it turns out that
+        # this doesn't work, because Python tracks the exception type
+        # (exc_info[0]) separately from the exception object (exc_info[1]),
+        # and we only modified the latter. So we really do need to re-raise
+        # the new type explicitly.
+        # On py3, the traceback is part of the exception object, so our
+        # in-place modification preserved it and we can just re-raise:
+        raise self
+
+
+class RemoteProtocolError(ProtocolError):
+    pass
+
+
+def validate(
+    regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any
+) -> Dict[str, bytes]:
+    match = regex.fullmatch(data)
+    if not match:
+        if format_args:
+            msg = msg.format(*format_args)
+        raise LocalProtocolError(msg)
+    return match.groupdict()
+
+
+# Sentinel values
+#
+# - Inherit identity-based comparison and hashing from object
+# - Have a nice repr
+# - Have a *bonus property*: type(sentinel) is sentinel
+#
+# The bonus property is useful if you want to take the return value from
+# next_event() and do some sort of dispatch based on type(event).
+
+_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel")
+
+
+class Sentinel(type):
+    def __new__(
+        cls: Type[_T_Sentinel],
+        name: str,
+        bases: Tuple[type, ...],
+        namespace: Dict[str, Any],
+        **kwds: Any
+    ) -> _T_Sentinel:
+        assert bases == (Sentinel,)
+        v = super().__new__(cls, name, bases, namespace, **kwds)
+        v.__class__ = v  # type: ignore
+        return v
+
+    def __repr__(self) -> str:
+        return self.__name__
+
+
+# Used for methods, request targets, HTTP versions, header names, and header
+# values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always
+# returns bytes.
+def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes:
+    # Fast-path:
+    if type(s) is bytes:
+        return s
+    if isinstance(s, str):
+        s = s.encode("ascii")
+    if isinstance(s, int):
+        raise TypeError("expected bytes-like object, not int")
+    return bytes(s)
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_version.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_version.py
new file mode 100644
index 00000000..75d42886
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_version.py
@@ -0,0 +1,16 @@
+# This file must be kept very simple, because it is consumed from several
+# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc.
+
+# We use a simple scheme:
+#   1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
+# where the +dev versions are never released into the wild, they're just what
+# we stick into the VCS in between releases.
+#
+# This is compatible with PEP 440:
+#   http://legacy.python.org/dev/peps/pep-0440/
+# via the use of the "local suffix" "+dev", which is disallowed on index
+# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we
+# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
+# 1.0.0.)
+
+__version__ = "0.13.0"
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_writers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_writers.py
new file mode 100644
index 00000000..90a8dc0a
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/_writers.py
@@ -0,0 +1,145 @@
+# Code to write HTTP data
+#
+# Strategy: each writer takes an event + a write-some-bytes function, which it
+# calls.
+#
+# WRITERS is a dict describing how to pick a writer. It maps states to either:
+# - a writer
+# - or, for body writers, a dict of framing-dependent writer factories
+
+from typing import Any, Callable, Dict, List, Tuple, Type, Union
+
+from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response
+from ._headers import Headers
+from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER
+from ._util import LocalProtocolError, Sentinel
+
+__all__ = ["WRITERS"]
+
+Writer = Callable[[bytes], Any]
+
+
+def write_headers(headers: Headers, write: Writer) -> None:
+    # "Since the Host field-value is critical information for handling a
+    # request, a user agent SHOULD generate Host as the first header field
+    # following the request-line." - RFC 7230
+    raw_items = headers._full_items
+    for raw_name, name, value in raw_items:
+        if name == b"host":
+            write(b"%s: %s\r\n" % (raw_name, value))
+    for raw_name, name, value in raw_items:
+        if name != b"host":
+            write(b"%s: %s\r\n" % (raw_name, value))
+    write(b"\r\n")
+
+
+def write_request(request: Request, write: Writer) -> None:
+    if request.http_version != b"1.1":
+        raise LocalProtocolError("I only send HTTP/1.1")
+    write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target))
+    write_headers(request.headers, write)
+
+
+# Shared between InformationalResponse and Response
+def write_any_response(
+    response: Union[InformationalResponse, Response], write: Writer
+) -> None:
+    if response.http_version != b"1.1":
+        raise LocalProtocolError("I only send HTTP/1.1")
+    status_bytes = str(response.status_code).encode("ascii")
+    # We don't bother sending ascii status messages like "OK"; they're
+    # optional and ignored by the protocol. (But the space after the numeric
+    # status code is mandatory.)
+    #
+    # XX FIXME: could at least make an effort to pull out the status message
+    # from stdlib's http.HTTPStatus table. Or maybe just steal their enums
+    # (either by import or copy/paste). We already accept them as status codes
+    # since they're of type IntEnum < int.
+    write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason))
+    write_headers(response.headers, write)
+
+
+class BodyWriter:
+    def __call__(self, event: Event, write: Writer) -> None:
+        if type(event) is Data:
+            self.send_data(event.data, write)
+        elif type(event) is EndOfMessage:
+            self.send_eom(event.headers, write)
+        else:  # pragma: no cover
+            assert False
+
+    def send_data(self, data: bytes, write: Writer) -> None:
+        pass
+
+    def send_eom(self, headers: Headers, write: Writer) -> None:
+        pass
+
+
+#
+# These are all careful not to do anything to 'data' except call len(data) and
+# write(data). This allows us to transparently pass-through funny objects,
+# like placeholder objects referring to files on disk that will be sent via
+# sendfile(2).
+#
+class ContentLengthWriter(BodyWriter):
+    def __init__(self, length: int) -> None:
+        self._length = length
+
+    def send_data(self, data: bytes, write: Writer) -> None:
+        self._length -= len(data)
+        if self._length < 0:
+            raise LocalProtocolError("Too much data for declared Content-Length")
+        write(data)
+
+    def send_eom(self, headers: Headers, write: Writer) -> None:
+        if self._length != 0:
+            raise LocalProtocolError("Too little data for declared Content-Length")
+        if headers:
+            raise LocalProtocolError("Content-Length and trailers don't mix")
+
+
+class ChunkedWriter(BodyWriter):
+    def send_data(self, data: bytes, write: Writer) -> None:
+        # if we encoded 0-length data in the naive way, it would look like an
+        # end-of-message.
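+        # (Informative example of the framing this class emits, following the
+        # chunked coding of RFC 7230: send_data(b"hello") writes
+        # b"5\r\nhello\r\n", and send_eom with no trailers writes
+        # b"0\r\n\r\n" -- so a naively-encoded empty chunk would collide with
+        # that terminator.)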
+ if not data: + return + write(b"%x\r\n" % len(data)) + write(data) + write(b"\r\n") + + def send_eom(self, headers: Headers, write: Writer) -> None: + write(b"0\r\n") + write_headers(headers, write) + + +class Http10Writer(BodyWriter): + def send_data(self, data: bytes, write: Writer) -> None: + write(data) + + def send_eom(self, headers: Headers, write: Writer) -> None: + if headers: + raise LocalProtocolError("can't send trailers to HTTP/1.0 client") + # no need to close the socket ourselves, that will be taken care of by + # Connection: close machinery + + +WritersType = Dict[ + Union[Tuple[Sentinel, Sentinel], Sentinel], + Union[ + Dict[str, Type[BodyWriter]], + Callable[[Union[InformationalResponse, Response], Writer], None], + Callable[[Request, Writer], None], + ], +] + +WRITERS: WritersType = { + (CLIENT, IDLE): write_request, + (SERVER, IDLE): write_any_response, + (SERVER, SEND_RESPONSE): write_any_response, + SEND_BODY: { + "chunked": ChunkedWriter, + "content-length": ContentLengthWriter, + "http/1.0": Http10Writer, + }, +} diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/py.typed b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/py.typed new file mode 100644 index 00000000..f5642f79 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/py.typed @@ -0,0 +1 @@ +Marker diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/data/test-file b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/data/test-file new file mode 100644 index 00000000..d0be0a6c --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/data/test-file @@ -0,0 +1 @@ +92b12bc045050b55b848d37167a1a63947c364579889ce1d39788e45e9fac9e5 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/helpers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/helpers.py new file mode 100644 index 00000000..571be444 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/helpers.py @@ -0,0 +1,101 @@ +from typing import cast, List, Type, Union, ValuesView + +from .._connection import Connection, NEED_DATA, PAUSED +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import CLIENT, CLOSED, DONE, MUST_CLOSE, SERVER +from .._util import Sentinel + +try: + from typing import Literal +except ImportError: + from typing_extensions import Literal # type: ignore + + +def get_all_events(conn: Connection) -> List[Event]: + got_events = [] + while True: + event = conn.next_event() + if event in (NEED_DATA, PAUSED): + break + event = cast(Event, event) + got_events.append(event) + if type(event) is ConnectionClosed: + break + return got_events + + +def receive_and_get(conn: Connection, data: bytes) -> List[Event]: + conn.receive_data(data) + return get_all_events(conn) + + +# Merges adjacent Data events, converts payloads to bytestrings, and removes +# chunk boundaries. 
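+# For example (informative): [Data(data=b"12"), Data(data=b"34"),
+# EndOfMessage()] normalizes to [Data(data=b"1234"), EndOfMessage()], with
+# chunk_start/chunk_end cleared on the merged events.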
+def normalize_data_events(in_events: List[Event]) -> List[Event]:
+    out_events: List[Event] = []
+    for event in in_events:
+        if type(event) is Data:
+            event = Data(data=bytes(event.data), chunk_start=False, chunk_end=False)
+        if out_events and type(out_events[-1]) is type(event) is Data:
+            out_events[-1] = Data(
+                data=out_events[-1].data + event.data,
+                chunk_start=out_events[-1].chunk_start,
+                chunk_end=out_events[-1].chunk_end,
+            )
+        else:
+            out_events.append(event)
+    return out_events
+
+
+# Given that we want to write tests that push some events through a Connection
+# and check that its state updates appropriately... we might as well make a
+# habit of pushing them through two Connections with a fake network link in
+# between.
+class ConnectionPair:
+    def __init__(self) -> None:
+        self.conn = {CLIENT: Connection(CLIENT), SERVER: Connection(SERVER)}
+        self.other = {CLIENT: SERVER, SERVER: CLIENT}
+
+    @property
+    def conns(self) -> ValuesView[Connection]:
+        return self.conn.values()
+
+    # expect="match" if expect=send_events; pass expect=[...] to say what is
+    # expected
+    def send(
+        self,
+        role: Type[Sentinel],
+        send_events: Union[List[Event], Event],
+        expect: Union[List[Event], Event, Literal["match"]] = "match",
+    ) -> bytes:
+        if not isinstance(send_events, list):
+            send_events = [send_events]
+        data = b""
+        closed = False
+        for send_event in send_events:
+            new_data = self.conn[role].send(send_event)
+            if new_data is None:
+                closed = True
+            else:
+                data += new_data
+        # send uses b"" to mean b"", and None to mean closed
+        # receive uses b"" to mean closed, and None to mean "try again"
+        # so we have to translate between the two conventions
+        if data:
+            self.conn[self.other[role]].receive_data(data)
+        if closed:
+            self.conn[self.other[role]].receive_data(b"")
+        got_events = get_all_events(self.conn[self.other[role]])
+        if expect == "match":
+            expect = send_events
+        if not isinstance(expect, list):
+            expect = [expect]
+        assert got_events == expect
+        return data
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_against_stdlib_http.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_against_stdlib_http.py
new file mode 100644
index 00000000..d2ee1314
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_against_stdlib_http.py
@@ -0,0 +1,115 @@
+import json
+import os.path
+import socket
+import socketserver
+import threading
+from contextlib import closing, contextmanager
+from http.server import SimpleHTTPRequestHandler
+from typing import Callable, Generator
+from urllib.request import urlopen
+
+import h11
+
+
+@contextmanager
+def socket_server(
+    handler: Callable[..., socketserver.BaseRequestHandler]
+) -> Generator[socketserver.TCPServer, None, None]:
+    httpd = socketserver.TCPServer(("127.0.0.1", 0), handler)
+    thread = threading.Thread(
+        target=httpd.serve_forever, kwargs={"poll_interval": 0.01}
+    )
+    thread.daemon = True
+    try:
+        thread.start()
+        yield httpd
+    finally:
+        httpd.shutdown()
+
+
+test_file_path = os.path.join(os.path.dirname(__file__), "data/test-file")
+with open(test_file_path, "rb") as f:
+    test_file_data = f.read()
+
+
+class SingleMindedRequestHandler(SimpleHTTPRequestHandler):
+    def translate_path(self, path: str) -> str:
+        return test_file_path
+
+
+def test_h11_as_client() -> None:
+    with socket_server(SingleMindedRequestHandler) as httpd:
+        with closing(socket.create_connection(httpd.server_address)) as s:
+            c = h11.Connection(h11.CLIENT)
+
+            s.sendall(
+                c.send(  #
type: ignore[arg-type] + h11.Request( + method="GET", target="/foo", headers=[("Host", "localhost")] + ) + ) + ) + s.sendall(c.send(h11.EndOfMessage())) # type: ignore[arg-type] + + data = bytearray() + while True: + event = c.next_event() + print(event) + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Response: + assert event.status_code == 200 + if type(event) is h11.Data: + data += event.data + if type(event) is h11.EndOfMessage: + break + assert bytes(data) == test_file_data + + +class H11RequestHandler(socketserver.BaseRequestHandler): + def handle(self) -> None: + with closing(self.request) as s: + c = h11.Connection(h11.SERVER) + request = None + while True: + event = c.next_event() + if event is h11.NEED_DATA: + # Use a small read buffer to make things more challenging + # and exercise more paths :-) + c.receive_data(s.recv(10)) + continue + if type(event) is h11.Request: + request = event + if type(event) is h11.EndOfMessage: + break + assert request is not None + info = json.dumps( + { + "method": request.method.decode("ascii"), + "target": request.target.decode("ascii"), + "headers": { + name.decode("ascii"): value.decode("ascii") + for (name, value) in request.headers + }, + } + ) + s.sendall(c.send(h11.Response(status_code=200, headers=[]))) # type: ignore[arg-type] + s.sendall(c.send(h11.Data(data=info.encode("ascii")))) + s.sendall(c.send(h11.EndOfMessage())) + + +def test_h11_as_server() -> None: + with socket_server(H11RequestHandler) as httpd: + host, port = httpd.server_address + url = "http://{}:{}/some-path".format(host, port) + with closing(urlopen(url)) as f: + assert f.getcode() == 200 + data = f.read() + info = json.loads(data.decode("ascii")) + print(info) + assert info["method"] == "GET" + assert info["target"] == "/some-path" + assert "urllib" in info["headers"]["user-agent"] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_connection.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_connection.py new file mode 100644 index 00000000..73a27b98 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_connection.py @@ -0,0 +1,1122 @@ +from typing import Any, cast, Dict, List, Optional, Tuple, Type + +import pytest + +from .._connection import _body_framing, _keep_alive, Connection, NEED_DATA, PAUSED +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError, RemoteProtocolError, Sentinel +from .helpers import ConnectionPair, get_all_events, receive_and_get + + +def test__keep_alive() -> None: + assert _keep_alive( + Request(method="GET", target="/", headers=[("Host", "Example.com")]) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "close")], + ) + ) + assert not _keep_alive( + Request( + method="GET", + target="/", + headers=[("Host", "Example.com"), ("Connection", "a, b, cLOse, foo")], + ) + ) + assert not _keep_alive( + Request(method="GET", target="/", headers=[], http_version="1.0") # type: ignore[arg-type] + ) + + assert _keep_alive(Response(status_code=200, headers=[])) # type: 
ignore[arg-type] + assert not _keep_alive(Response(status_code=200, headers=[("Connection", "close")])) + assert not _keep_alive( + Response(status_code=200, headers=[("Connection", "a, b, cLOse, foo")]) + ) + assert not _keep_alive(Response(status_code=200, headers=[], http_version="1.0")) # type: ignore[arg-type] + + +def test__body_framing() -> None: + def headers(cl: Optional[int], te: bool) -> List[Tuple[str, str]]: + headers = [] + if cl is not None: + headers.append(("Content-Length", str(cl))) + if te: + headers.append(("Transfer-Encoding", "chunked")) + return headers + + def resp( + status_code: int = 200, cl: Optional[int] = None, te: bool = False + ) -> Response: + return Response(status_code=status_code, headers=headers(cl, te)) + + def req(cl: Optional[int] = None, te: bool = False) -> Request: + h = headers(cl, te) + h += [("Host", "example.com")] + return Request(method="GET", target="/", headers=h) + + # Special cases where the headers are ignored: + for kwargs in [{}, {"cl": 100}, {"te": True}, {"cl": 100, "te": True}]: + kwargs = cast(Dict[str, Any], kwargs) + for meth, r in [ + (b"HEAD", resp(**kwargs)), + (b"GET", resp(status_code=204, **kwargs)), + (b"GET", resp(status_code=304, **kwargs)), + ]: + assert _body_framing(meth, r) == ("content-length", (0,)) + + # Transfer-encoding + for kwargs in [{"te": True}, {"cl": 100, "te": True}]: + kwargs = cast(Dict[str, Any], kwargs) + for meth, r in [(None, req(**kwargs)), (b"GET", resp(**kwargs))]: # type: ignore + assert _body_framing(meth, r) == ("chunked", ()) + + # Content-Length + for meth, r in [(None, req(cl=100)), (b"GET", resp(cl=100))]: # type: ignore + assert _body_framing(meth, r) == ("content-length", (100,)) + + # No headers + assert _body_framing(None, req()) == ("content-length", (0,)) # type: ignore + assert _body_framing(b"GET", resp()) == ("http/1.0", ()) + + +def test_Connection_basics_and_content_length() -> None: + with pytest.raises(ValueError): + Connection("CLIENT") # type: ignore + + p = ConnectionPair() + assert p.conn[CLIENT].our_role is CLIENT + assert p.conn[CLIENT].their_role is SERVER + assert p.conn[SERVER].our_role is SERVER + assert p.conn[SERVER].their_role is CLIENT + + data = p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Content-Length", "10")], + ), + ) + assert data == ( + b"GET / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 10\r\n\r\n" + ) + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + assert p.conn[CLIENT].our_state is SEND_BODY + assert p.conn[CLIENT].their_state is SEND_RESPONSE + assert p.conn[SERVER].our_state is SEND_RESPONSE + assert p.conn[SERVER].their_state is SEND_BODY + + assert p.conn[CLIENT].their_http_version is None + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type] + assert data == b"HTTP/1.1 100 \r\n\r\n" + + data = p.send(SERVER, Response(status_code=200, headers=[("Content-Length", "11")])) + assert data == b"HTTP/1.1 200 \r\nContent-Length: 11\r\n\r\n" + + for conn in p.conns: + assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + assert p.conn[CLIENT].their_http_version == b"1.1" + assert p.conn[SERVER].their_http_version == b"1.1" + + data = p.send(CLIENT, Data(data=b"12345")) + assert data == b"12345" + data = p.send( + CLIENT, Data(data=b"67890"), expect=[Data(data=b"67890"), EndOfMessage()] + ) + assert data == b"67890" + data = 
p.send(CLIENT, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} + + data = p.send(SERVER, Data(data=b"1234567890")) + assert data == b"1234567890" + data = p.send(SERVER, Data(data=b"1"), expect=[Data(data=b"1"), EndOfMessage()]) + assert data == b"1" + data = p.send(SERVER, EndOfMessage(), expect=[]) + assert data == b"" + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunked() -> None: + p = ConnectionPair() + + p.send( + CLIENT, + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ), + ) + data = p.send(CLIENT, Data(data=b"1234567890", chunk_start=True, chunk_end=True)) + assert data == b"a\r\n1234567890\r\n" + data = p.send(CLIENT, Data(data=b"abcde", chunk_start=True, chunk_end=True)) + assert data == b"5\r\nabcde\r\n" + data = p.send(CLIENT, Data(data=b""), expect=[]) + assert data == b"" + data = p.send(CLIENT, EndOfMessage(headers=[("hello", "there")])) + assert data == b"0\r\nhello: there\r\n\r\n" + + p.send( + SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) + ) + p.send(SERVER, Data(data=b"54321", chunk_start=True, chunk_end=True)) + p.send(SERVER, Data(data=b"12345", chunk_start=True, chunk_end=True)) + p.send(SERVER, EndOfMessage()) + + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + + +def test_chunk_boundaries() -> None: + conn = Connection(our_role=SERVER) + + request = ( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n" + b"\r\n" + ) + conn.receive_data(request) + assert conn.next_event() == Request( + method="POST", + target="/", + headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], + ) + assert conn.next_event() is NEED_DATA + + conn.receive_data(b"5\r\nhello\r\n") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"5\r\nhel") + assert conn.next_event() == Data(data=b"hel", chunk_start=True, chunk_end=False) + + conn.receive_data(b"l") + assert conn.next_event() == Data(data=b"l", chunk_start=False, chunk_end=False) + + conn.receive_data(b"o\r\n") + assert conn.next_event() == Data(data=b"o", chunk_start=False, chunk_end=True) + + conn.receive_data(b"5\r\nhello") + assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) + + conn.receive_data(b"\r\n") + assert conn.next_event() == NEED_DATA + + conn.receive_data(b"0\r\n\r\n") + assert conn.next_event() == EndOfMessage() + + +def test_client_talking_to_http10_server() -> None: + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "example.com")])) + c.send(EndOfMessage()) + assert c.our_state is DONE + # No content-length, so Http10 framing for body + assert receive_and_get(c, b"HTTP/1.0 200 OK\r\n\r\n") == [ + Response(status_code=200, headers=[], http_version="1.0", reason=b"OK") # type: ignore[arg-type] + ] + assert c.our_state is MUST_CLOSE + assert receive_and_get(c, b"12345") == [Data(data=b"12345")] + assert receive_and_get(c, b"67890") == [Data(data=b"67890")] + assert receive_and_get(c, b"") == [EndOfMessage(), ConnectionClosed()] + assert c.their_state is CLOSED + + +def test_server_talking_to_http10_client() -> None: + c = Connection(SERVER) + # No content-length, so no body + # NB: no host header + assert receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") == [ + Request(method="GET", target="/", headers=[], 
http_version="1.0"), # type: ignore[arg-type] + EndOfMessage(), + ] + assert c.their_state is MUST_CLOSE + + # We automatically Connection: close back at them + assert ( + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" + ) + + assert c.send(Data(data=b"12345")) == b"12345" + assert c.send(EndOfMessage()) == b"" + assert c.our_state is MUST_CLOSE + + # Check that it works if they do send Content-Length + c = Connection(SERVER) + # NB: no host header + assert receive_and_get(c, b"POST / HTTP/1.0\r\nContent-Length: 10\r\n\r\n1") == [ + Request( + method="POST", + target="/", + headers=[("Content-Length", "10")], + http_version="1.0", + ), + Data(data=b"1"), + ] + assert receive_and_get(c, b"234567890") == [Data(data=b"234567890"), EndOfMessage()] + assert c.their_state is MUST_CLOSE + assert receive_and_get(c, b"") == [ConnectionClosed()] + + +def test_automatic_transfer_encoding_in_response() -> None: + # Check that in responses, the user can specify either Transfer-Encoding: + # chunked or no framing at all, and in both cases we automatically select + # the right option depending on whether the peer speaks HTTP/1.0 or + # HTTP/1.1 + for user_headers in [ + [("Transfer-Encoding", "chunked")], + [], + # In fact, this even works if Content-Length is set, + # because if both are set then Transfer-Encoding wins + [("Transfer-Encoding", "chunked"), ("Content-Length", "100")], + ]: + user_headers = cast(List[Tuple[str, str]], user_headers) + p = ConnectionPair() + p.send( + CLIENT, + [ + Request(method="GET", target="/", headers=[("Host", "example.com")]), + EndOfMessage(), + ], + ) + # When speaking to HTTP/1.1 client, all of the above cases get + # normalized to Transfer-Encoding: chunked + p.send( + SERVER, + Response(status_code=200, headers=user_headers), + expect=Response( + status_code=200, headers=[("Transfer-Encoding", "chunked")] + ), + ) + + # When speaking to HTTP/1.0 client, all of the above cases get + # normalized to no-framing-headers + c = Connection(SERVER) + receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert ( + c.send(Response(status_code=200, headers=user_headers)) + == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" + ) + assert c.send(Data(data=b"12345")) == b"12345" + + +def test_automagic_connection_close_handling() -> None: + p = ConnectionPair() + # If the user explicitly sets Connection: close, then we notice and + # respect it + p.send( + CLIENT, + [ + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Connection", "close")], + ), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states[CLIENT] is MUST_CLOSE + # And if the client sets it, the server automatically echoes it back + p.send( + SERVER, + # no header here... 
+        [Response(status_code=204, headers=[]), EndOfMessage()],  # type: ignore[arg-type]
+        # ...but oh look, it arrived anyway
+        expect=[
+            Response(status_code=204, headers=[("connection", "close")]),
+            EndOfMessage(),
+        ],
+    )
+    for conn in p.conns:
+        assert conn.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE}
+
+
+def test_100_continue() -> None:
+    def setup() -> ConnectionPair:
+        p = ConnectionPair()
+        p.send(
+            CLIENT,
+            Request(
+                method="GET",
+                target="/",
+                headers=[
+                    ("Host", "example.com"),
+                    ("Content-Length", "100"),
+                    ("Expect", "100-continue"),
+                ],
+            ),
+        )
+        for conn in p.conns:
+            assert conn.client_is_waiting_for_100_continue
+        assert not p.conn[CLIENT].they_are_waiting_for_100_continue
+        assert p.conn[SERVER].they_are_waiting_for_100_continue
+        return p
+
+    # Disabled by 100 Continue
+    p = setup()
+    p.send(SERVER, InformationalResponse(status_code=100, headers=[]))  # type: ignore[arg-type]
+    for conn in p.conns:
+        assert not conn.client_is_waiting_for_100_continue
+        assert not conn.they_are_waiting_for_100_continue
+
+    # Disabled by a real response
+    p = setup()
+    p.send(
+        SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")])
+    )
+    for conn in p.conns:
+        assert not conn.client_is_waiting_for_100_continue
+        assert not conn.they_are_waiting_for_100_continue
+
+    # Disabled by the client going ahead and sending stuff anyway
+    p = setup()
+    p.send(CLIENT, Data(data=b"12345"))
+    for conn in p.conns:
+        assert not conn.client_is_waiting_for_100_continue
+        assert not conn.they_are_waiting_for_100_continue
+
+
+def test_max_incomplete_event_size_countermeasure() -> None:
+    # Infinitely long headers are definitely not okay
+    c = Connection(SERVER)
+    c.receive_data(b"GET / HTTP/1.0\r\nEndless: ")
+    assert c.next_event() is NEED_DATA
+    with pytest.raises(RemoteProtocolError):
+        while True:
+            c.receive_data(b"a" * 1024)
+            c.next_event()
+
+    # Checking that the same header is accepted / rejected depending on the
+    # max_incomplete_event_size setting:
+    c = Connection(SERVER, max_incomplete_event_size=5000)
+    c.receive_data(b"GET / HTTP/1.0\r\nBig: ")
+    c.receive_data(b"a" * 4000)
+    c.receive_data(b"\r\n\r\n")
+    assert get_all_events(c) == [
+        Request(
+            method="GET", target="/", http_version="1.0", headers=[("big", "a" * 4000)]
+        ),
+        EndOfMessage(),
+    ]
+
+    c = Connection(SERVER, max_incomplete_event_size=4000)
+    c.receive_data(b"GET / HTTP/1.0\r\nBig: ")
+    c.receive_data(b"a" * 4000)
+    with pytest.raises(RemoteProtocolError):
+        c.next_event()
+
+    # Temporarily exceeding the size limit is fine, as long as it's done with
+    # complete events:
+    c = Connection(SERVER, max_incomplete_event_size=5000)
+    c.receive_data(b"GET / HTTP/1.0\r\nContent-Length: 10000")
+    c.receive_data(b"\r\n\r\n" + b"a" * 10000)
+    assert get_all_events(c) == [
+        Request(
+            method="GET",
+            target="/",
+            http_version="1.0",
+            headers=[("Content-Length", "10000")],
+        ),
+        Data(data=b"a" * 10000),
+        EndOfMessage(),
+    ]
+
+    c = Connection(SERVER, max_incomplete_event_size=100)
+    # Two pipelined requests to create a way-too-big receive buffer...
but + # it's fine because we're not checking + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a\r\n\r\n" + b"GET /2 HTTP/1.1\r\nHost: b\r\n\r\n" + b"X" * 1000 + ) + assert get_all_events(c) == [ + Request(method="GET", target="/1", headers=[("host", "a")]), + EndOfMessage(), + ] + # Even more data comes in, still no problem + c.receive_data(b"X" * 1000) + # We can respond and reuse to get the second pipelined request + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + assert get_all_events(c) == [ + Request(method="GET", target="/2", headers=[("host", "b")]), + EndOfMessage(), + ] + # But once we unpause and try to read the next message, and find that it's + # incomplete and the buffer is *still* way too large, then *that's* a + # problem: + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_reuse_simple() -> None: + p = ConnectionPair() + p.send( + CLIENT, + [Request(method="GET", target="/", headers=[("Host", "a")]), EndOfMessage()], + ) + p.send( + SERVER, + [ + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ], + ) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: DONE} + conn.start_next_cycle() + + p.send( + CLIENT, + [ + Request(method="DELETE", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ], + ) + p.send( + SERVER, + [ + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ], + ) + + +def test_pipelining() -> None: + # Client doesn't support pipelining, so we have to do this by hand + c = Connection(SERVER) + assert c.next_event() is NEED_DATA + # 3 requests all bunched up + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + b"GET /3 HTTP/1.1\r\nHost: a.com\r\n\r\n" + ) + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.their_state is DONE + assert c.our_state is SEND_RESPONSE + + assert c.next_event() is PAUSED + + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.their_state is DONE + assert c.our_state is DONE + + c.start_next_cycle() + + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("Host", "a.com"), ("Content-Length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ] + assert c.next_event() is PAUSED + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + c.start_next_cycle() + + assert get_all_events(c) == [ + Request(method="GET", target="/3", headers=[("Host", "a.com")]), + EndOfMessage(), + ] + # Doesn't pause this time, no trailing data + assert c.next_event() is NEED_DATA + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + + # Arrival of more data triggers pause + assert c.next_event() is NEED_DATA + c.receive_data(b"SADF") + assert c.next_event() is PAUSED + assert c.trailing_data == (b"SADF", False) + # If EOF arrives while paused, we don't see that either: + c.receive_data(b"") + assert c.trailing_data == (b"SADF", True) + assert c.next_event() is PAUSED + c.receive_data(b"") + assert c.next_event() 
is PAUSED + # Can't call receive_data with non-empty buf after closing it + with pytest.raises(RuntimeError): + c.receive_data(b"FDSA") + + +def test_protocol_switch() -> None: + for (req, deny, accept) in [ + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + ), + ( + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + InformationalResponse(status_code=101, headers=[("Upgrade", "a")]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + # Accept CONNECT, not upgrade + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + ), + ( + Request( + method="CONNECT", + target="example.com:443", + headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], + ), + Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), + # Accept Upgrade, not CONNECT + InformationalResponse(status_code=101, headers=[("Upgrade", "b")]), + ), + ]: + + def setup() -> ConnectionPair: + p = ConnectionPair() + p.send(CLIENT, req) + # No switch-related state change stuff yet; the client has to + # finish the request before that kicks in + for conn in p.conns: + assert conn.states[CLIENT] is SEND_BODY + p.send(CLIENT, [Data(data=b"1"), EndOfMessage()]) + for conn in p.conns: + assert conn.states[CLIENT] is MIGHT_SWITCH_PROTOCOL + assert p.conn[SERVER].next_event() is PAUSED + return p + + # Test deny case + p = setup() + p.send(SERVER, deny) + for conn in p.conns: + assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} + p.send(SERVER, EndOfMessage()) + # Check that re-use is still allowed after a denial + for conn in p.conns: + conn.start_next_cycle() + + # Test accept case + p = setup() + p.send(SERVER, accept) + for conn in p.conns: + assert conn.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + conn.receive_data(b"123") + assert conn.next_event() is PAUSED + conn.receive_data(b"456") + assert conn.next_event() is PAUSED + assert conn.trailing_data == (b"123456", False) + + # Pausing in might-switch, then recovery + # (weird artificial case where the trailing data actually is valid + # HTTP for some reason, because this makes it easier to test the state + # logic) + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"GET / HTTP/1.0\r\n\r\n") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"GET / HTTP/1.0\r\n\r\n", False) + sc.send(deny) + assert sc.next_event() is PAUSED + sc.send(EndOfMessage()) + sc.start_next_cycle() + assert get_all_events(sc) == [ + Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] + EndOfMessage(), + ] + + # When we're DONE, have no trailing data, and the connection gets + # closed, we report ConnectionClosed(). When we're in might-switch or + # switched, we don't. 
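+    # (Rationale, as a reading aid: a bare EOF is ambiguous while a protocol
+    # switch is still possible -- any buffered bytes may belong to the new
+    # protocol, so the connection stays PAUSED rather than reporting
+    # ConnectionClosed().)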
+ p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") + assert sc.next_event() is PAUSED + assert sc.trailing_data == (b"", True) + p.send(SERVER, accept) + assert sc.next_event() is PAUSED + + p = setup() + sc = p.conn[SERVER] + sc.receive_data(b"") + assert sc.next_event() is PAUSED + sc.send(deny) + assert sc.next_event() == ConnectionClosed() + + # You can't send after switching protocols, or while waiting for a + # protocol switch + p = setup() + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send( + Request(method="GET", target="/", headers=[("Host", "a")]) + ) + p = setup() + p.send(SERVER, accept) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(Data(data=b"123")) + + +def test_close_simple() -> None: + # Just immediately closing a new connection without anything having + # happened yet. + for (who_shot_first, who_shot_second) in [(CLIENT, SERVER), (SERVER, CLIENT)]: + + def setup() -> ConnectionPair: + p = ConnectionPair() + p.send(who_shot_first, ConnectionClosed()) + for conn in p.conns: + assert conn.states == { + who_shot_first: CLOSED, + who_shot_second: MUST_CLOSE, + } + return p + + # You can keep putting b"" into a closed connection, and you keep + # getting ConnectionClosed() out: + p = setup() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + p.conn[who_shot_second].receive_data(b"") + assert p.conn[who_shot_second].next_event() == ConnectionClosed() + # Second party can close... + p = setup() + p.send(who_shot_second, ConnectionClosed()) + for conn in p.conns: + assert conn.our_state is CLOSED + assert conn.their_state is CLOSED + # But trying to receive new data on a closed connection is a + # RuntimeError (not ProtocolError, because the problem here isn't + # violation of HTTP, it's violation of physics) + p = setup() + with pytest.raises(RuntimeError): + p.conn[who_shot_second].receive_data(b"123") + # And receiving new data on a MUST_CLOSE connection is a ProtocolError + p = setup() + p.conn[who_shot_first].receive_data(b"GET") + with pytest.raises(RemoteProtocolError): + p.conn[who_shot_first].next_event() + + +def test_close_different_states() -> None: + req = [ + Request(method="GET", target="/foo", headers=[("Host", "a")]), + EndOfMessage(), + ] + resp = [ + Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), + EndOfMessage(), + ] + + # Client before request + p = ConnectionPair() + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + + # Client after request + p = ConnectionPair() + p.send(CLIENT, req) + p.send(CLIENT, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + + # Server after request -> not allowed + p = ConnectionPair() + p.send(CLIENT, req) + with pytest.raises(LocalProtocolError): + p.conn[SERVER].send(ConnectionClosed()) + p.conn[CLIENT].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[CLIENT].next_event() + + # Server after response + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(SERVER, ConnectionClosed()) + for conn in p.conns: + assert conn.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + # Both after closing (ConnectionClosed() is idempotent) + p = ConnectionPair() + p.send(CLIENT, req) + p.send(SERVER, resp) + p.send(CLIENT, ConnectionClosed()) + p.send(SERVER, ConnectionClosed()) + p.send(CLIENT, ConnectionClosed()) + 
p.send(SERVER, ConnectionClosed()) + + # In the middle of sending -> not allowed + p = ConnectionPair() + p.send( + CLIENT, + Request( + method="GET", target="/", headers=[("Host", "a"), ("Content-Length", "10")] + ), + ) + with pytest.raises(LocalProtocolError): + p.conn[CLIENT].send(ConnectionClosed()) + p.conn[SERVER].receive_data(b"") + with pytest.raises(RemoteProtocolError): + p.conn[SERVER].next_event() + + +# Receive several requests and then client shuts down their side of the +# connection; we can respond to each +def test_pipelined_close() -> None: + c = Connection(SERVER) + # 2 requests then a close + c.receive_data( + b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"12345" + b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" + b"67890" + ) + c.receive_data(b"") + assert get_all_events(c) == [ + Request( + method="GET", + target="/1", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"12345"), + EndOfMessage(), + ] + assert c.states[CLIENT] is DONE + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.states[SERVER] is DONE + c.start_next_cycle() + assert get_all_events(c) == [ + Request( + method="GET", + target="/2", + headers=[("host", "a.com"), ("content-length", "5")], + ), + Data(data=b"67890"), + EndOfMessage(), + ConnectionClosed(), + ] + assert c.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} + c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] + c.send(EndOfMessage()) + assert c.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} + c.send(ConnectionClosed()) + assert c.states == {CLIENT: CLOSED, SERVER: CLOSED} + + +def test_sendfile() -> None: + class SendfilePlaceholder: + def __len__(self) -> int: + return 10 + + placeholder = SendfilePlaceholder() + + def setup( + header: Tuple[str, str], http_version: str + ) -> Tuple[Connection, Optional[List[bytes]]]: + c = Connection(SERVER) + receive_and_get( + c, "GET / HTTP/{}\r\nHost: a\r\n\r\n".format(http_version).encode("ascii") + ) + headers = [] + if header: + headers.append(header) + c.send(Response(status_code=200, headers=headers)) + return c, c.send_with_data_passthrough(Data(data=placeholder)) # type: ignore + + c, data = setup(("Content-Length", "10"), "1.1") + assert data == [placeholder] # type: ignore + # Raises an error if the connection object doesn't think we've sent + # exactly 10 bytes + c.send(EndOfMessage()) + + _, data = setup(("Transfer-Encoding", "chunked"), "1.1") + assert placeholder in data # type: ignore + data[data.index(placeholder)] = b"x" * 10 # type: ignore + assert b"".join(data) == b"a\r\nxxxxxxxxxx\r\n" # type: ignore + + c, data = setup(None, "1.0") # type: ignore + assert data == [placeholder] # type: ignore + assert c.our_state is SEND_BODY + + +def test_errors() -> None: + # After a receive error, you can't receive + for role in [CLIENT, SERVER]: + c = Connection(our_role=role) + c.receive_data(b"gibberish\r\n\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + # Now any attempt to receive continues to raise + assert c.their_state is ERROR + assert c.our_state is not ERROR + print(c._cstate.states) + with pytest.raises(RemoteProtocolError): + c.next_event() + # But we can still yell at the client for sending us gibberish + if role is SERVER: + assert ( + c.send(Response(status_code=400, headers=[])) # type: ignore[arg-type] + == b"HTTP/1.1 400 \r\nConnection: close\r\n\r\n" + ) + + # After an error sending, you can no longer send + # (This is 
especially important for things like content-length errors, + # where there's complex internal state being modified) + def conn(role: Type[Sentinel]) -> Connection: + c = Connection(our_role=role) + if role is SERVER: + # Put it into the state where it *could* send a response... + receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") + assert c.our_state is SEND_RESPONSE + return c + + for role in [CLIENT, SERVER]: + if role is CLIENT: + # This HTTP/1.0 request won't be detected as bad until after we go + # through the state machine and hit the writing code + good = Request(method="GET", target="/", headers=[("Host", "example.com")]) + bad = Request( + method="GET", + target="/", + headers=[("Host", "example.com")], + http_version="1.0", + ) + elif role is SERVER: + good = Response(status_code=200, headers=[]) # type: ignore[arg-type,assignment] + bad = Response(status_code=200, headers=[], http_version="1.0") # type: ignore[arg-type,assignment] + # Make sure 'good' actually is good + c = conn(role) + c.send(good) + assert c.our_state is not ERROR + # Do that again, but this time sending 'bad' first + c = conn(role) + with pytest.raises(LocalProtocolError): + c.send(bad) + assert c.our_state is ERROR + assert c.their_state is not ERROR + # Now 'good' is not so good + with pytest.raises(LocalProtocolError): + c.send(good) + + # And check send_failed() too + c = conn(role) + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + # This is idempotent + c.send_failed() + assert c.our_state is ERROR + assert c.their_state is not ERROR + + +def test_idle_receive_nothing() -> None: + # At one point this incorrectly raised an error + for role in [CLIENT, SERVER]: + c = Connection(role) + assert c.next_event() is NEED_DATA + + +def test_connection_drop() -> None: + c = Connection(SERVER) + c.receive_data(b"GET /") + assert c.next_event() is NEED_DATA + c.receive_data(b"") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +def test_408_request_timeout() -> None: + # Should be able to send this spontaneously as a server without seeing + # anything from client + p = ConnectionPair() + p.send(SERVER, Response(status_code=408, headers=[(b"connection", b"close")])) + + +# This used to raise IndexError +def test_empty_request() -> None: + c = Connection(SERVER) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to raise IndexError +def test_empty_response() -> None: + c = Connection(CLIENT) + c.send(Request(method="GET", target="/", headers=[("Host", "a")])) + c.receive_data(b"\r\n") + with pytest.raises(RemoteProtocolError): + c.next_event() + + +@pytest.mark.parametrize( + "data", + [ + b"\x00", + b"\x20", + b"\x16\x03\x01\x00\xa5", # Typical start of a TLS Client Hello + ], +) +def test_early_detection_of_invalid_request(data: bytes) -> None: + c = Connection(SERVER) + # Early detection should occur before even receiving a `\r\n` + c.receive_data(data) + with pytest.raises(RemoteProtocolError): + c.next_event() + + +@pytest.mark.parametrize( + "data", + [ + b"\x00", + b"\x20", + b"\x16\x03\x03\x00\x31", # Typical start of a TLS Server Hello + ], +) +def test_early_detection_of_invalid_response(data: bytes) -> None: + c = Connection(CLIENT) + # Early detection should occur before even receiving a `\r\n` + c.receive_data(data) + with pytest.raises(RemoteProtocolError): + c.next_event() + + +# This used to give different headers for HEAD and GET. 
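+# (Per RFC 7231, a server SHOULD send the same header fields in response to
+# HEAD as it would have sent for the equivalent GET, just without the body.)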
+# The correct way to handle HEAD is to put whatever headers we *would* have +# put if it were a GET -- even though we know that for HEAD, those headers +# will be ignored. +def test_HEAD_framing_headers() -> None: + def setup(method: bytes, http_version: bytes) -> Connection: + c = Connection(SERVER) + c.receive_data( + method + b" / HTTP/" + http_version + b"\r\n" + b"Host: example.com\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert type(c.next_event()) is EndOfMessage + return c + + for method in [b"GET", b"HEAD"]: + # No Content-Length, HTTP/1.1 peer, should use chunked + c = setup(method, b"1.1") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] + b"Transfer-Encoding: chunked\r\n\r\n" + ) + + # No Content-Length, HTTP/1.0 peer, frame with connection: close + c = setup(method, b"1.0") + assert ( + c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] + b"Connection: close\r\n\r\n" + ) + + # Content-Length + Transfer-Encoding, TE wins + c = setup(method, b"1.1") + assert ( + c.send( + Response( + status_code=200, + headers=[ + ("Content-Length", "100"), + ("Transfer-Encoding", "chunked"), + ], + ) + ) + == b"HTTP/1.1 200 \r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + + +def test_special_exceptions_for_lost_connection_in_message_body() -> None: + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 100\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"12345") + assert c.next_event() == Data(data=b"12345") + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "received 5 bytes" in str(excinfo.value) + assert "expected 100" in str(excinfo.value) + + c = Connection(SERVER) + c.receive_data( + b"POST / HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Transfer-Encoding: chunked\r\n\r\n" + ) + assert type(c.next_event()) is Request + assert c.next_event() is NEED_DATA + c.receive_data(b"8\r\n012345") + assert c.next_event().data == b"012345" # type: ignore + c.receive_data(b"") + with pytest.raises(RemoteProtocolError) as excinfo: + c.next_event() + assert "incomplete chunked read" in str(excinfo.value) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_events.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_events.py new file mode 100644 index 00000000..bc6c3137 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_events.py @@ -0,0 +1,150 @@ +from http import HTTPStatus + +import pytest + +from .. 
import _events +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._util import LocalProtocolError + + +def test_events() -> None: + with pytest.raises(LocalProtocolError): + # Missing Host: + req = Request( + method="GET", target="/", headers=[("a", "b")], http_version="1.1" + ) + # But this is okay (HTTP/1.0) + req = Request(method="GET", target="/", headers=[("a", "b")], http_version="1.0") + # fields are normalized + assert req.method == b"GET" + assert req.target == b"/" + assert req.headers == [(b"a", b"b")] + assert req.http_version == b"1.0" + + # This is also okay -- has a Host (with weird capitalization, which is ok) + req = Request( + method="GET", + target="/", + headers=[("a", "b"), ("hOSt", "example.com")], + http_version="1.1", + ) + # we normalize header capitalization + assert req.headers == [(b"a", b"b"), (b"host", b"example.com")] + + # Multiple host is bad too + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.1", + ) + # Even for HTTP/1.0 + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Host", "a")], + http_version="1.0", + ) + + # Header values are validated + for bad_char in "\x00\r\n\f\v": + with pytest.raises(LocalProtocolError): + req = Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd" + bad_char)], + http_version="1.0", + ) + + # But for compatibility we allow non-whitespace control characters, even + # though they're forbidden by the spec. + Request( + method="GET", + target="/", + headers=[("Host", "a"), ("Foo", "asd\x01\x02\x7f")], + http_version="1.0", + ) + + # Request target is validated + for bad_byte in b"\x00\x20\x7f\xee": + target = bytearray(b"/") + target.append(bad_byte) + with pytest.raises(LocalProtocolError): + Request( + method="GET", target=target, headers=[("Host", "a")], http_version="1.1" + ) + + # Request method is validated + with pytest.raises(LocalProtocolError): + Request( + method="GET / HTTP/1.1", + target=target, + headers=[("Host", "a")], + http_version="1.1", + ) + + ir = InformationalResponse(status_code=100, headers=[("Host", "a")]) + assert ir.status_code == 100 + assert ir.headers == [(b"host", b"a")] + assert ir.http_version == b"1.1" + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=200, headers=[("Host", "a")]) + + resp = Response(status_code=204, headers=[], http_version="1.0") # type: ignore[arg-type] + assert resp.status_code == 204 + assert resp.headers == [] + assert resp.http_version == b"1.0" + + with pytest.raises(LocalProtocolError): + resp = Response(status_code=100, headers=[], http_version="1.0") # type: ignore[arg-type] + + with pytest.raises(LocalProtocolError): + Response(status_code="100", headers=[], http_version="1.0") # type: ignore[arg-type] + + with pytest.raises(LocalProtocolError): + InformationalResponse(status_code=b"100", headers=[], http_version="1.0") # type: ignore[arg-type] + + d = Data(data=b"asdf") + assert d.data == b"asdf" + + eom = EndOfMessage() + assert eom.headers == [] + + cc = ConnectionClosed() + assert repr(cc) == "ConnectionClosed()" + + +def test_intenum_status_code() -> None: + # https://github.com/python-hyper/h11/issues/72 + + r = Response(status_code=HTTPStatus.OK, headers=[], http_version="1.0") # type: ignore[arg-type] + assert r.status_code == HTTPStatus.OK + assert 
type(r.status_code) is not type(HTTPStatus.OK) + assert type(r.status_code) is int + + +def test_header_casing() -> None: + r = Request( + method="GET", + target="/", + headers=[("Host", "example.org"), ("Connection", "keep-alive")], + http_version="1.1", + ) + assert len(r.headers) == 2 + assert r.headers[0] == (b"host", b"example.org") + assert r.headers == [(b"host", b"example.org"), (b"connection", b"keep-alive")] + assert r.headers.raw_items() == [ + (b"Host", b"example.org"), + (b"Connection", b"keep-alive"), + ] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_headers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_headers.py new file mode 100644 index 00000000..ba53d088 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_headers.py @@ -0,0 +1,157 @@ +import pytest + +from .._events import Request +from .._headers import ( + get_comma_header, + has_expect_100_continue, + Headers, + normalize_and_validate, + set_comma_header, +) +from .._util import LocalProtocolError + + +def test_normalize_and_validate() -> None: + assert normalize_and_validate([("foo", "bar")]) == [(b"foo", b"bar")] + assert normalize_and_validate([(b"foo", b"bar")]) == [(b"foo", b"bar")] + + # no leading/trailing whitespace in names + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo ", "bar")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b" foo", "bar")]) + + # no weird characters in names + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([(b"foo bar", b"baz")]) + assert "foo bar" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x00bar", b"baz")]) + # Not even 8-bit characters: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\xffbar", b"baz")]) + # And not even the control characters we allow in values: + with pytest.raises(LocalProtocolError): + normalize_and_validate([(b"foo\x01bar", b"baz")]) + + # no return or NUL characters in values + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("foo", "bar\rbaz")]) + assert "bar\\rbaz" in str(excinfo.value) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\nbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "bar\x00baz")]) + # no leading/trailing whitespace + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz ")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", " barbaz")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "barbaz\t")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("foo", "\tbarbaz")]) + + # content-length + assert normalize_and_validate([("Content-Length", "1")]) == [ + (b"content-length", b"1") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "asdf")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1x")]) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1"), ("Content-Length", "2")]) + assert normalize_and_validate( + [("Content-Length", "0"), ("Content-Length", "0")] + ) == [(b"content-length", b"0")] + assert normalize_and_validate([("Content-Length", "0 , 0")]) == [ + (b"content-length", b"0") + ] + with pytest.raises(LocalProtocolError): + normalize_and_validate( + 
[("Content-Length", "1"), ("Content-Length", "1"), ("Content-Length", "2")] + ) + with pytest.raises(LocalProtocolError): + normalize_and_validate([("Content-Length", "1 , 1,2")]) + + # transfer-encoding + assert normalize_and_validate([("Transfer-Encoding", "chunked")]) == [ + (b"transfer-encoding", b"chunked") + ] + assert normalize_and_validate([("Transfer-Encoding", "cHuNkEd")]) == [ + (b"transfer-encoding", b"chunked") + ] + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate([("Transfer-Encoding", "gzip")]) + assert excinfo.value.error_status_hint == 501 # Not Implemented + with pytest.raises(LocalProtocolError) as excinfo: + normalize_and_validate( + [("Transfer-Encoding", "chunked"), ("Transfer-Encoding", "gzip")] + ) + assert excinfo.value.error_status_hint == 501 # Not Implemented + + +def test_get_set_comma_header() -> None: + headers = normalize_and_validate( + [ + ("Connection", "close"), + ("whatever", "something"), + ("connectiON", "fOo,, , BAR"), + ] + ) + + assert get_comma_header(headers, b"connection") == [b"close", b"foo", b"bar"] + + headers = set_comma_header(headers, b"newthing", ["a", "b"]) # type: ignore + + with pytest.raises(LocalProtocolError): + set_comma_header(headers, b"newthing", [" a", "b"]) # type: ignore + + assert headers == [ + (b"connection", b"close"), + (b"whatever", b"something"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + ] + + headers = set_comma_header(headers, b"whatever", ["different thing"]) # type: ignore + + assert headers == [ + (b"connection", b"close"), + (b"connection", b"fOo,, , BAR"), + (b"newthing", b"a"), + (b"newthing", b"b"), + (b"whatever", b"different thing"), + ] + + +def test_has_100_continue() -> None: + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + ) + ) + assert not has_expect_100_continue( + Request(method="GET", target="/", headers=[("Host", "example.com")]) + ) + # Case insensitive + assert has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-Continue")], + ) + ) + # Doesn't work in HTTP/1.0 + assert not has_expect_100_continue( + Request( + method="GET", + target="/", + headers=[("Host", "example.com"), ("Expect", "100-continue")], + http_version="1.0", + ) + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_helpers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_helpers.py new file mode 100644 index 00000000..c329c767 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_helpers.py @@ -0,0 +1,32 @@ +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .helpers import normalize_data_events + + +def test_normalize_data_events() -> None: + assert normalize_data_events( + [ + Data(data=bytearray(b"1")), + Data(data=b"2"), + Response(status_code=200, headers=[]), # type: ignore[arg-type] + Data(data=b"3"), + Data(data=b"4"), + EndOfMessage(), + Data(data=b"5"), + Data(data=b"6"), + Data(data=b"7"), + ] + ) == [ + Data(data=b"12"), + Response(status_code=200, headers=[]), # type: ignore[arg-type] + Data(data=b"34"), + EndOfMessage(), + Data(data=b"567"), + ] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_io.py 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_io.py new file mode 100644 index 00000000..e9c01bd5 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_io.py @@ -0,0 +1,566 @@ +from typing import Any, Callable, Generator, List + +import pytest + +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._headers import Headers, normalize_and_validate +from .._readers import ( + _obsolete_line_fold, + ChunkedReader, + ContentLengthReader, + Http10Reader, + READERS, +) +from .._receivebuffer import ReceiveBuffer +from .._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError +from .._writers import ( + ChunkedWriter, + ContentLengthWriter, + Http10Writer, + write_any_response, + write_headers, + write_request, + WRITERS, +) +from .helpers import normalize_data_events + +SIMPLE_CASES = [ + ( + (CLIENT, IDLE), + Request( + method="GET", + target="/a", + headers=[("Host", "foo"), ("Connection", "close")], + ), + b"GET /a HTTP/1.1\r\nHost: foo\r\nConnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[("Connection", "close")], reason=b"OK"), + b"HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + Response(status_code=200, headers=[], reason=b"OK"), # type: ignore[arg-type] + b"HTTP/1.1 200 OK\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse( + status_code=101, headers=[("Upgrade", "websocket")], reason=b"Upgrade" + ), + b"HTTP/1.1 101 Upgrade\r\nUpgrade: websocket\r\n\r\n", + ), + ( + (SERVER, SEND_RESPONSE), + InformationalResponse(status_code=101, headers=[], reason=b"Upgrade"), # type: ignore[arg-type] + b"HTTP/1.1 101 Upgrade\r\n\r\n", + ), +] + + +def dowrite(writer: Callable[..., None], obj: Any) -> bytes: + got_list: List[bytes] = [] + writer(obj, got_list.append) + return b"".join(got_list) + + +def tw(writer: Any, obj: Any, expected: Any) -> None: + got = dowrite(writer, obj) + assert got == expected + + +def makebuf(data: bytes) -> ReceiveBuffer: + buf = ReceiveBuffer() + buf += data + return buf + + +def tr(reader: Any, data: bytes, expected: Any) -> None: + def check(got: Any) -> None: + assert got == expected + # Headers should always be returned as bytes, not e.g. 
bytearray + # https://github.com/python-hyper/wsproto/pull/54#issuecomment-377709478 + for name, value in getattr(got, "headers", []): + assert type(name) is bytes + assert type(value) is bytes + + # Simple: consume whole thing + buf = makebuf(data) + check(reader(buf)) + assert not buf + + # Incrementally growing buffer + buf = ReceiveBuffer() + for i in range(len(data)): + assert reader(buf) is None + buf += data[i : i + 1] + check(reader(buf)) + + # Trailing data + buf = makebuf(data) + buf += b"trailing" + check(reader(buf)) + assert bytes(buf) == b"trailing" + + +def test_writers_simple() -> None: + for ((role, state), event, binary) in SIMPLE_CASES: + tw(WRITERS[role, state], event, binary) + + +def test_readers_simple() -> None: + for ((role, state), event, binary) in SIMPLE_CASES: + tr(READERS[role, state], binary, event) + + +def test_writers_unusual() -> None: + # Simple test of the write_headers utility routine + tw( + write_headers, + normalize_and_validate([("foo", "bar"), ("baz", "quux")]), + b"foo: bar\r\nbaz: quux\r\n\r\n", + ) + tw(write_headers, Headers([]), b"\r\n") + + # We understand HTTP/1.0, but we don't speak it + with pytest.raises(LocalProtocolError): + tw( + write_request, + Request( + method="GET", + target="/", + headers=[("Host", "foo"), ("Connection", "close")], + http_version="1.0", + ), + None, + ) + with pytest.raises(LocalProtocolError): + tw( + write_any_response, + Response( + status_code=200, headers=[("Connection", "close")], http_version="1.0" + ), + None, + ) + + +def test_readers_unusual() -> None: + # Reading HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\nSome: header\r\n\r\n", + Request( + method="HEAD", + target="/foo", + headers=[("Some", "header")], + http_version="1.0", + ), + ) + + # check no-headers, since it's only legal with HTTP/1.0 + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.0\r\n\r\n", + Request(method="HEAD", target="/foo", headers=[], http_version="1.0"), # type: ignore[arg-type] + ) + + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\nSome: header\r\n\r\n", + Response( + status_code=200, + headers=[("Some", "header")], + http_version="1.0", + reason=b"OK", + ), + ) + + # single-character header values (actually disallowed by the ABNF in RFC + # 7230 -- this is a bug in the standard that we originally copied...) 
+ tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo: a a a a a \r\n\r\n", + Response( + status_code=200, + headers=[("Foo", "a a a a a")], + http_version="1.0", + reason=b"OK", + ), + ) + + # Empty headers -- also legal + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo:\r\n\r\n", + Response( + status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" + ), + ) + + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200 OK\r\n" b"Foo: \t \t \r\n\r\n", + Response( + status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" + ), + ) + + # Tolerate broken servers that leave off the response code + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.0 200\r\n" b"Foo: bar\r\n\r\n", + Response( + status_code=200, headers=[("Foo", "bar")], http_version="1.0", reason=b"" + ), + ) + + # Tolerate header line endings (\r\n and \n) + # \n\r\n between headers and body + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\r\nSomeHeader: val\n\r\n", + Response( + status_code=200, + headers=[("SomeHeader", "val")], + http_version="1.1", + reason="OK", + ), + ) + + # delimited only with \n + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\nSomeHeader1: val1\nSomeHeader2: val2\n\n", + Response( + status_code=200, + headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")], + http_version="1.1", + reason="OK", + ), + ) + + # mixed \r\n and \n + tr( + READERS[SERVER, SEND_RESPONSE], + b"HTTP/1.1 200 OK\r\nSomeHeader1: val1\nSomeHeader2: val2\n\r\n", + Response( + status_code=200, + headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")], + http_version="1.1", + reason="OK", + ), + ) + + # obsolete line folding + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" + b"Host: example.com\r\n" + b"Some: multi-line\r\n" + b" header\r\n" + b"\tnonsense\r\n" + b" \t \t\tI guess\r\n" + b"Connection: close\r\n" + b"More-nonsense: in the\r\n" + b" last header \r\n\r\n", + Request( + method="HEAD", + target="/foo", + headers=[ + ("Host", "example.com"), + ("Some", "multi-line header nonsense I guess"), + ("Connection", "close"), + ("More-nonsense", "in the last header"), + ], + ), + ) + + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b" folded: line\r\n\r\n", + None, + ) + + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo : line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", + None, + ) + with pytest.raises(LocalProtocolError): + tr(READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.1\r\n" b": line\r\n\r\n", None) + + +def test__obsolete_line_fold_bytes() -> None: + # _obsolete_line_fold has a defensive cast to bytearray, which is + # necessary to protect against O(n^2) behavior in case anyone ever passes + # in regular bytestrings... but right now we never pass in regular + # bytestrings. so this test just exists to get some coverage on that + # defensive cast. 
+ assert list(_obsolete_line_fold([b"aaa", b"bbb", b" ccc", b"ddd"])) == [ + b"aaa", + bytearray(b"bbb ccc"), + b"ddd", + ] + + +def _run_reader_iter( + reader: Any, buf: bytes, do_eof: bool +) -> Generator[Any, None, None]: + while True: + event = reader(buf) + if event is None: + break + yield event + # body readers have undefined behavior after returning EndOfMessage, + # because this changes the state so they don't get called again + if type(event) is EndOfMessage: + break + if do_eof: + assert not buf + yield reader.read_eof() + + +def _run_reader(*args: Any) -> List[Event]: + events = list(_run_reader_iter(*args)) + return normalize_data_events(events) + + +def t_body_reader(thunk: Any, data: bytes, expected: Any, do_eof: bool = False) -> None: + # Simple: consume whole thing + print("Test 1") + buf = makebuf(data) + assert _run_reader(thunk(), buf, do_eof) == expected + + # Incrementally growing buffer + print("Test 2") + reader = thunk() + buf = ReceiveBuffer() + events = [] + for i in range(len(data)): + events += _run_reader(reader, buf, False) + buf += data[i : i + 1] + events += _run_reader(reader, buf, do_eof) + assert normalize_data_events(events) == expected + + is_complete = any(type(event) is EndOfMessage for event in expected) + if is_complete and not do_eof: + buf = makebuf(data + b"trailing") + assert _run_reader(thunk(), buf, False) == expected + + +def test_ContentLengthReader() -> None: + t_body_reader(lambda: ContentLengthReader(0), b"", [EndOfMessage()]) + + t_body_reader( + lambda: ContentLengthReader(10), + b"0123456789", + [Data(data=b"0123456789"), EndOfMessage()], + ) + + +def test_Http10Reader() -> None: + t_body_reader(Http10Reader, b"", [EndOfMessage()], do_eof=True) + t_body_reader(Http10Reader, b"asdf", [Data(data=b"asdf")], do_eof=False) + t_body_reader( + Http10Reader, b"asdf", [Data(data=b"asdf"), EndOfMessage()], do_eof=True + ) + + +def test_ChunkedReader() -> None: + t_body_reader(ChunkedReader, b"0\r\n\r\n", [EndOfMessage()]) + + t_body_reader( + ChunkedReader, + b"0\r\nSome: header\r\n\r\n", + [EndOfMessage(headers=[("Some", "header")])], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + + b"10\r\n0123456789abcdef\r\n" + + b"0\r\n" + + b"Some: header\r\n\r\n", + [ + Data(data=b"012340123456789abcdef"), + EndOfMessage(headers=[("Some", "header")]), + ], + ) + + t_body_reader( + ChunkedReader, + b"5\r\n01234\r\n" + b"10\r\n0123456789abcdef\r\n" + b"0\r\n\r\n", + [Data(data=b"012340123456789abcdef"), EndOfMessage()], + ) + + # handles upper and lowercase hex + t_body_reader( + ChunkedReader, + b"aA\r\n" + b"x" * 0xAA + b"\r\n" + b"0\r\n\r\n", + [Data(data=b"x" * 0xAA), EndOfMessage()], + ) + + # refuses arbitrarily long chunk integers + with pytest.raises(LocalProtocolError): + # Technically this is legal HTTP/1.1, but we refuse to process chunk + # sizes that don't fit into 20 characters of hex + t_body_reader(ChunkedReader, b"9" * 100 + b"\r\nxxx", [Data(data=b"xxx")]) + + # refuses garbage in the chunk count + with pytest.raises(LocalProtocolError): + t_body_reader(ChunkedReader, b"10\x00\r\nxxx", None) + + # handles (and discards) "chunk extensions" omg wtf + t_body_reader( + ChunkedReader, + b"5; hello=there\r\n" + + b"xxxxx" + + b"\r\n" + + b'0; random="junk"; some=more; canbe=lonnnnngg\r\n\r\n', + [Data(data=b"xxxxx"), EndOfMessage()], + ) + + +def test_ContentLengthWriter() -> None: + w = ContentLengthWriter(5) + assert dowrite(w, Data(data=b"123")) == b"123" + assert dowrite(w, Data(data=b"45")) == b"45" + assert dowrite(w, 
EndOfMessage()) == b"" + + w = ContentLengthWriter(5) + with pytest.raises(LocalProtocolError): + dowrite(w, Data(data=b"123456")) + + w = ContentLengthWriter(5) + dowrite(w, Data(data=b"123")) + with pytest.raises(LocalProtocolError): + dowrite(w, Data(data=b"456")) + + w = ContentLengthWriter(5) + dowrite(w, Data(data=b"123")) + with pytest.raises(LocalProtocolError): + dowrite(w, EndOfMessage()) + + w = ContentLengthWriter(5) + assert dowrite(w, Data(data=b"123")) == b"123" + assert dowrite(w, Data(data=b"45")) == b"45" + with pytest.raises(LocalProtocolError): + dowrite(w, EndOfMessage(headers=[("Etag", "asdf")])) + + +def test_ChunkedWriter() -> None: + w = ChunkedWriter() + assert dowrite(w, Data(data=b"aaa")) == b"3\r\naaa\r\n" + assert dowrite(w, Data(data=b"a" * 20)) == b"14\r\n" + b"a" * 20 + b"\r\n" + + assert dowrite(w, Data(data=b"")) == b"" + + assert dowrite(w, EndOfMessage()) == b"0\r\n\r\n" + + assert ( + dowrite(w, EndOfMessage(headers=[("Etag", "asdf"), ("a", "b")])) + == b"0\r\nEtag: asdf\r\na: b\r\n\r\n" + ) + + +def test_Http10Writer() -> None: + w = Http10Writer() + assert dowrite(w, Data(data=b"1234")) == b"1234" + assert dowrite(w, EndOfMessage()) == b"" + + with pytest.raises(LocalProtocolError): + dowrite(w, EndOfMessage(headers=[("Etag", "asdf")])) + + +def test_reject_garbage_after_request_line() -> None: + with pytest.raises(LocalProtocolError): + tr(READERS[SERVER, SEND_RESPONSE], b"HTTP/1.0 200 OK\x00xxxx\r\n\r\n", None) + + +def test_reject_garbage_after_response_line() -> None: + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1 xxxxxx\r\n" b"Host: a\r\n\r\n", + None, + ) + + +def test_reject_garbage_in_header_line() -> None: + with pytest.raises(LocalProtocolError): + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" b"Host: foo\x00bar\r\n\r\n", + None, + ) + + +def test_reject_non_vchar_in_path() -> None: + for bad_char in b"\x00\x20\x7f\xee": + message = bytearray(b"HEAD /") + message.append(bad_char) + message.extend(b" HTTP/1.1\r\nHost: foobar\r\n\r\n") + with pytest.raises(LocalProtocolError): + tr(READERS[CLIENT, IDLE], message, None) + + +# https://github.com/python-hyper/h11/issues/57 +def test_allow_some_garbage_in_cookies() -> None: + tr( + READERS[CLIENT, IDLE], + b"HEAD /foo HTTP/1.1\r\n" + b"Host: foo\r\n" + b"Set-Cookie: ___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900\r\n" + b"\r\n", + Request( + method="HEAD", + target="/foo", + headers=[ + ("Host", "foo"), + ("Set-Cookie", "___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900"), + ], + ), + ) + + +def test_host_comes_first() -> None: + tw( + write_headers, + normalize_and_validate([("foo", "bar"), ("Host", "example.com")]), + b"Host: example.com\r\nfoo: bar\r\n\r\n", + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_receivebuffer.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_receivebuffer.py new file mode 100644 index 00000000..21a3870b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_receivebuffer.py @@ -0,0 +1,135 @@ +import re +from typing import Tuple + +import pytest + +from .._receivebuffer import ReceiveBuffer + + +def test_receivebuffer() -> None: + b = ReceiveBuffer() + assert not b + assert len(b) == 0 + assert bytes(b) == b"" + + b += b"123" + assert b + assert len(b) == 3 + assert bytes(b) == b"123" + + assert bytes(b) == b"123" + + assert b.maybe_extract_at_most(2) == b"12" + assert b + assert len(b) == 1 + assert bytes(b) == 
b"3" + + assert bytes(b) == b"3" + + assert b.maybe_extract_at_most(10) == b"3" + assert bytes(b) == b"" + + assert b.maybe_extract_at_most(10) is None + assert not b + + ################################################################ + # maybe_extract_until_next + ################################################################ + + b += b"123\n456\r\n789\r\n" + + assert b.maybe_extract_next_line() == b"123\n456\r\n" + assert bytes(b) == b"789\r\n" + + assert b.maybe_extract_next_line() == b"789\r\n" + assert bytes(b) == b"" + + b += b"12\r" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"12\r" + + b += b"345\n\r" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"12\r345\n\r" + + # here we stopped at the middle of b"\r\n" delimiter + + b += b"\n6789aaa123\r\n" + assert b.maybe_extract_next_line() == b"12\r345\n\r\n" + assert b.maybe_extract_next_line() == b"6789aaa123\r\n" + assert b.maybe_extract_next_line() is None + assert bytes(b) == b"" + + ################################################################ + # maybe_extract_lines + ################################################################ + + b += b"123\r\na: b\r\nfoo:bar\r\n\r\ntrailing" + lines = b.maybe_extract_lines() + assert lines == [b"123", b"a: b", b"foo:bar"] + assert bytes(b) == b"trailing" + + assert b.maybe_extract_lines() is None + + b += b"\r\n\r" + assert b.maybe_extract_lines() is None + + assert b.maybe_extract_at_most(100) == b"trailing\r\n\r" + assert not b + + # Empty body case (as happens at the end of chunked encoding if there are + # no trailing headers, e.g.) + b += b"\r\ntrailing" + assert b.maybe_extract_lines() == [] + assert bytes(b) == b"trailing" + + +@pytest.mark.parametrize( + "data", + [ + pytest.param( + ( + b"HTTP/1.1 200 OK\r\n", + b"Content-type: text/plain\r\n", + b"Connection: close\r\n", + b"\r\n", + b"Some body", + ), + id="with_crlf_delimiter", + ), + pytest.param( + ( + b"HTTP/1.1 200 OK\n", + b"Content-type: text/plain\n", + b"Connection: close\n", + b"\n", + b"Some body", + ), + id="with_lf_only_delimiter", + ), + pytest.param( + ( + b"HTTP/1.1 200 OK\n", + b"Content-type: text/plain\r\n", + b"Connection: close\n", + b"\n", + b"Some body", + ), + id="with_mixed_crlf_and_lf", + ), + ], +) +def test_receivebuffer_for_invalid_delimiter(data: Tuple[bytes]) -> None: + b = ReceiveBuffer() + + for line in data: + b += line + + lines = b.maybe_extract_lines() + + assert lines == [ + b"HTTP/1.1 200 OK", + b"Content-type: text/plain", + b"Connection: close", + ] + assert bytes(b) == b"Some body" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_state.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_state.py new file mode 100644 index 00000000..bc974e63 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_state.py @@ -0,0 +1,271 @@ +import pytest + +from .._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from .._state import ( + _SWITCH_CONNECT, + _SWITCH_UPGRADE, + CLIENT, + CLOSED, + ConnectionState, + DONE, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from .._util import LocalProtocolError + + +def test_ConnectionState() -> None: + cs = ConnectionState() + + # Basic event-triggered transitions + + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + cs.process_event(CLIENT, Request) + # The SERVER-Request special 
case: + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + # Illegal transitions raise an error and nothing happens + with pytest.raises(LocalProtocolError): + cs.process_event(CLIENT, Request) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} + + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: DONE, SERVER: DONE} + + # State-triggered transition + + cs.process_event(SERVER, ConnectionClosed) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} + + +def test_ConnectionState_keep_alive() -> None: + # keep_alive = False + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} + + +def test_ConnectionState_keep_alive_in_DONE() -> None: + # Check that if keep_alive is disabled when the CLIENT is already in DONE, + # then this is sufficient to immediately trigger the DONE -> MUST_CLOSE + # transition + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + assert cs.states[CLIENT] is DONE + cs.process_keep_alive_disabled() + assert cs.states[CLIENT] is MUST_CLOSE + + +def test_ConnectionState_switch_denied() -> None: + for switch_type in (_SWITCH_CONNECT, _SWITCH_UPGRADE): + for deny_early in (True, False): + cs = ConnectionState() + cs.process_client_switch_proposal(switch_type) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + assert switch_type in cs.pending_switch_proposals + + if deny_early: + # before client reaches DONE + cs.process_event(SERVER, Response) + assert not cs.pending_switch_proposals + + cs.process_event(CLIENT, EndOfMessage) + + if deny_early: + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + else: + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == { + CLIENT: MIGHT_SWITCH_PROTOCOL, + SERVER: SEND_RESPONSE, + } + + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + assert not cs.pending_switch_proposals + + +_response_type_for_switch = { + _SWITCH_UPGRADE: InformationalResponse, + _SWITCH_CONNECT: Response, + None: Response, +} + + +def test_ConnectionState_protocol_switch_accepted() -> None: + for switch_event in [_SWITCH_UPGRADE, _SWITCH_CONNECT]: + cs = ConnectionState() + cs.process_client_switch_proposal(switch_event) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, InformationalResponse) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + cs.process_event(SERVER, _response_type_for_switch[switch_event], switch_event) + assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + + +def 
test_ConnectionState_double_protocol_switch() -> None: + # CONNECT + Upgrade is legal! Very silly, but legal. So we support + # it. Because sometimes doing the silly thing is easier than not. + for server_switch in [None, _SWITCH_UPGRADE, _SWITCH_CONNECT]: + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_client_switch_proposal(_SWITCH_CONNECT) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + cs.process_event( + SERVER, _response_type_for_switch[server_switch], server_switch + ) + if server_switch is None: + assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} + else: + assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} + + +def test_ConnectionState_inconsistent_protocol_switch() -> None: + for client_switches, server_switch in [ + ([], _SWITCH_CONNECT), + ([], _SWITCH_UPGRADE), + ([_SWITCH_UPGRADE], _SWITCH_CONNECT), + ([_SWITCH_CONNECT], _SWITCH_UPGRADE), + ]: + cs = ConnectionState() + for client_switch in client_switches: # type: ignore[attr-defined] + cs.process_client_switch_proposal(client_switch) + cs.process_event(CLIENT, Request) + with pytest.raises(LocalProtocolError): + cs.process_event(SERVER, Response, server_switch) + + +def test_ConnectionState_keepalive_protocol_switch_interaction() -> None: + # keep_alive=False + pending_switch_proposals + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, Data) + assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} + + # the protocol switch "wins" + cs.process_event(CLIENT, EndOfMessage) + assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} + + # but when the server denies the request, keep_alive comes back into play + cs.process_event(SERVER, Response) + assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_BODY} + + +def test_ConnectionState_reuse() -> None: + cs = ConnectionState() + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + cs.start_next_cycle() + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + # No keepalive + + cs.process_event(CLIENT, Request) + cs.process_keep_alive_disabled() + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # One side closed + + cs = ConnectionState() + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(CLIENT, ConnectionClosed) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # Successful protocol switch + + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, InformationalResponse, _SWITCH_UPGRADE) + + with pytest.raises(LocalProtocolError): + cs.start_next_cycle() + + # Failed protocol switch + + cs = ConnectionState() + cs.process_client_switch_proposal(_SWITCH_UPGRADE) + cs.process_event(CLIENT, Request) + 
cs.process_event(CLIENT, EndOfMessage) + cs.process_event(SERVER, Response) + cs.process_event(SERVER, EndOfMessage) + + cs.start_next_cycle() + assert cs.states == {CLIENT: IDLE, SERVER: IDLE} + + +def test_server_request_is_illegal() -> None: + # There used to be a bug in how we handled the Request special case that + # made this allowed... + cs = ConnectionState() + with pytest.raises(LocalProtocolError): + cs.process_event(SERVER, Request) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_util.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_util.py new file mode 100644 index 00000000..1637919b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/h11/tests/test_util.py @@ -0,0 +1,112 @@ +import re +import sys +import traceback +from typing import NoReturn + +import pytest + +from .._util import ( + bytesify, + LocalProtocolError, + ProtocolError, + RemoteProtocolError, + Sentinel, + validate, +) + + +def test_ProtocolError() -> None: + with pytest.raises(TypeError): + ProtocolError("abstract base class") + + +def test_LocalProtocolError() -> None: + try: + raise LocalProtocolError("foo") + except LocalProtocolError as e: + assert str(e) == "foo" + assert e.error_status_hint == 400 + + try: + raise LocalProtocolError("foo", error_status_hint=418) + except LocalProtocolError as e: + assert str(e) == "foo" + assert e.error_status_hint == 418 + + def thunk() -> NoReturn: + raise LocalProtocolError("a", error_status_hint=420) + + try: + try: + thunk() + except LocalProtocolError as exc1: + orig_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) + exc1._reraise_as_remote_protocol_error() + except RemoteProtocolError as exc2: + assert type(exc2) is RemoteProtocolError + assert exc2.args == ("a",) + assert exc2.error_status_hint == 420 + new_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) + assert new_traceback.endswith(orig_traceback) + + +def test_validate() -> None: + my_re = re.compile(br"(?P<group1>[0-9]+)\.(?P<group2>[0-9]+)") + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.") + + groups = validate(my_re, b"0.1") + assert groups == {"group1": b"0", "group2": b"1"} + + # successful partial matches are an error - must match whole string + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.1xx") + with pytest.raises(LocalProtocolError): + validate(my_re, b"0.1\n") + + +def test_validate_formatting() -> None: + my_re = re.compile(br"foo") + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops") + assert "oops" in str(excinfo.value) + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops {}") + assert "oops {}" in str(excinfo.value) + + with pytest.raises(LocalProtocolError) as excinfo: + validate(my_re, b"", "oops {} xx", 10) + assert "oops 10 xx" in str(excinfo.value) + + +def test_make_sentinel() -> None: + class S(Sentinel, metaclass=Sentinel): + pass + + assert repr(S) == "S" + assert S == S + assert type(S).__name__ == "S" + assert S in {S} + assert type(S) is S + + class S2(Sentinel, metaclass=Sentinel): + pass + + assert repr(S2) == "S2" + assert S != S2 + assert S not in {S2} + assert type(S) is not type(S2) + + +def test_bytesify() -> None: + assert bytesify(b"123") == b"123" + assert bytesify(bytearray(b"123")) == b"123" + assert bytesify("123") == b"123" + + with pytest.raises(UnicodeEncodeError): + bytesify("\u1234") + + with pytest.raises(TypeError): + bytesify(10) diff --git 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/LICENSE b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/LICENSE new file mode 100644 index 00000000..79a03ca5 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2015 MagicStack Inc. http://magic.io + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/METADATA new file mode 100644 index 00000000..1e6aeab5 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/METADATA @@ -0,0 +1,135 @@ +Metadata-Version: 2.1 +Name: httptools +Version: 0.4.0 +Summary: A collection of framework independent HTTP protocol utils. +Home-page: https://github.com/MagicStack/httptools +Author: Yury Selivanov +Author-email: yury@magic.io +License: MIT +Platform: macOS +Platform: POSIX +Platform: Windows +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python :: 3 +Classifier: Operating System :: POSIX +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Environment :: Web Environment +Classifier: Development Status :: 5 - Production/Stable +Requires-Python: >=3.5.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: test +Requires-Dist: Cython (<0.30.0,>=0.29.24) ; extra == 'test' + +![Tests](https://github.com/MagicStack/httptools/workflows/Tests/badge.svg) + +httptools is a Python binding for the nodejs HTTP parser. + +The package is available on PyPI: `pip install httptools`. + + +# APIs + +httptools contains two classes `httptools.HttpRequestParser`, +`httptools.HttpResponseParser` (fulfilled through +[llhttp](https://github.com/nodejs/llhttp)) and a function for +parsing URLs `httptools.parse_url` (through +[http-parse](https://github.com/nodejs/http-parser) for now). 
+See unittests for examples. + + +```python + +class HttpRequestParser: + + def __init__(self, protocol): + """HttpRequestParser + + protocol -- a Python object with the following methods + (all optional): + + - on_message_begin() + - on_url(url: bytes) + - on_header(name: bytes, value: bytes) + - on_headers_complete() + - on_body(body: bytes) + - on_message_complete() + - on_chunk_header() + - on_chunk_complete() + - on_status(status: bytes) + """ + + def get_http_version(self) -> str: + """Return an HTTP protocol version.""" + + def should_keep_alive(self) -> bool: + """Return ``True`` if keep-alive mode is preferred.""" + + def should_upgrade(self) -> bool: + """Return ``True`` if the parsed request is a valid Upgrade request. + The method exposes a flag set just before on_headers_complete. + Calling this method earlier will only yield `False`. + """ + + def feed_data(self, data: bytes): + """Feed data to the parser. + + Will eventually trigger callbacks on the ``protocol`` + object. + + On HTTP upgrade, this method will raise an + ``HttpParserUpgrade`` exception, with its sole argument + set to the offset of the non-HTTP data in ``data``. + """ + + def get_method(self) -> bytes: + """Return HTTP request method (GET, HEAD, etc)""" + + +class HttpResponseParser: + + """Has all methods except ``get_method()`` that + HttpRequestParser has.""" + + def get_status_code(self) -> int: + """Return the status code of the HTTP response""" + + +def parse_url(url: bytes): + """Parse URL strings into a structured Python object. + + Returns an instance of ``httptools.URL`` class with the + following attributes: + + - schema: bytes + - host: bytes + - port: int + - path: bytes + - query: bytes + - fragment: bytes + - userinfo: bytes + """ +``` + + +# Development + +1. Clone this repository with + `git clone --recursive git@github.com:MagicStack/httptools.git` + +2. Create a virtual environment with Python 3: + `python3 -m venv envname` + +3. Activate the environment with `source envname/bin/activate` + +4. Install development requirements with `pip install -e .[test]` + +5. Run `make` and `make test`. + + +# License + +MIT. 
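The METADATA above defers to the package's unit tests for usage examples. As a minimal sketch of the callback API it documents (`HttpRequestParser` with an optional-callback protocol object, `feed_data`, `get_method`, `get_http_version`, and `parse_url`) — note the `Collector` class, the sample request bytes, and the asserted values below are illustrative assumptions, not part of this diff:

```python
# Sketch of the httptools callback API documented in the METADATA above.
# The Collector protocol object and sample request are hypothetical.
import httptools


class Collector:
    """Protocol object; every callback is optional per the docs above."""

    def __init__(self) -> None:
        self.url = b""
        self.headers = []
        self.body = b""

    def on_url(self, url: bytes) -> None:
        self.url = url

    def on_header(self, name: bytes, value: bytes) -> None:
        self.headers.append((name, value))

    def on_body(self, body: bytes) -> None:
        self.body += body


collector = Collector()
parser = httptools.HttpRequestParser(collector)

# Feeding a complete request triggers the callbacks on `collector`.
parser.feed_data(b"GET /ping?x=1 HTTP/1.1\r\nHost: example.com\r\n\r\n")

assert parser.get_method() == b"GET"
assert parser.get_http_version() == "1.1"
assert collector.url == b"/ping?x=1"
assert (b"Host", b"example.com") in collector.headers

# parse_url returns a structured object (schema/host/port/path/query/...).
url = httptools.parse_url(b"http://example.com:8080/ping?x=1")
assert url.port == 8080 and url.path == b"/ping" and url.query == b"x=1"
```

The same protocol-object pattern applies to `HttpResponseParser`, which per the docs above has every method except `get_method()` and adds `get_status_code()`.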
+ + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/RECORD new file mode 100644 index 00000000..811bc4a3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/RECORD @@ -0,0 +1,18 @@ +httptools-0.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httptools-0.4.0.dist-info/LICENSE,sha256=HAKLBZ16WOFMzaOMq87WmMWR_mFP--T3IUMERuhsjWk,1114 +httptools-0.4.0.dist-info/METADATA,sha256=SPBw4NDQwGZinZd69ZJt4v5SQMU-1gcJmjeRT47LOkw,3597 +httptools-0.4.0.dist-info/RECORD,, +httptools-0.4.0.dist-info/WHEEL,sha256=OmDKD3JmZjhMu_YdpkiJDfFL1rf8n9nexhWCGhSBGVE,101 +httptools-0.4.0.dist-info/top_level.txt,sha256=APjJKTbZcj0OQ4fdgf2eTCk82nK1n2BFXOD7ky41MPY,10 +httptools/__init__.py,sha256=uWLIa0nghlhLGoS8My_6YVhUlFbv3FhYsvxYpZHecS8,153 +httptools/__pycache__/__init__.cpython-37.pyc,, +httptools/__pycache__/_version.cpython-37.pyc,, +httptools/_version.py,sha256=jNecmUTYiNqpt3WCDHjdZbEqrAYBV8iD_ZN-kfjLz9I,588 +httptools/parser/__init__.py,sha256=1XGlLzcfsdWYFehxDHaQJUl7RhWnSwG9hWpVkUKK50g,171 +httptools/parser/__pycache__/__init__.cpython-37.pyc,, +httptools/parser/__pycache__/errors.cpython-37.pyc,, +httptools/parser/errors.py,sha256=fQeEGiZ6AQH7SIa-7uzMs5QggoXr0A8YBPVHmafSw5Q,596 +httptools/parser/parser.c,sha256=Vrbwy_G-dwwPCsSNOcQNklEumZXhwls8wbSVLmU2uFQ,400467 +httptools/parser/parser.cp37-win_amd64.pyd,sha256=b9aztssGMqqLNHInYUMEFg48v71APKBKudMIlGLev3M,83968 +httptools/parser/url_parser.c,sha256=h45G5o5mKPlwODL3NyQ_u3NqGCsR4r0jaPgKHQt1LE8,236857 +httptools/parser/url_parser.cp37-win_amd64.pyd,sha256=YZXzMhPJySbxRqdSrcz73jgsv8xiBGaxYf3F8MKg7u8,39424 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/WHEEL b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/WHEEL new file mode 100644 index 00000000..54091f94 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: false +Tag: cp37-cp37m-win_amd64 + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/top_level.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/top_level.txt new file mode 100644 index 00000000..bef3b40b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools-0.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +httptools diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/__init__.py new file mode 100644 index 00000000..972053ef --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/__init__.py @@ -0,0 +1,6 @@ +from . import parser +from .parser import * # NOQA + +from ._version import __version__ # NOQA + +__all__ = parser.__all__ + ('__version__',) # NOQA diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/_version.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/_version.py new file mode 100644 index 00000000..adce8989 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/_version.py @@ -0,0 +1,13 @@ +# This file MUST NOT contain anything but the __version__ assignment. 
+# +# When making a release, change the value of __version__ +# to an appropriate value, and open a pull request against +# the correct branch (master if making a new feature release). +# The commit message MUST contain a properly formatted release +# log, and the commit must be signed. +# +# The release automation will: build and test the packages for the +# supported platforms, publish the packages on PyPI, merge the PR +# to the target branch, create a Git tag pointing to the commit. + +__version__ = '0.4.0' diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/__init__.py new file mode 100644 index 00000000..ba371f5e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/__init__.py @@ -0,0 +1,5 @@ +from .parser import * # NoQA +from .errors import * # NoQA +from .url_parser import * # NoQA + +__all__ = parser.__all__ + errors.__all__ + url_parser.__all__ # NoQA diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/errors.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/errors.py new file mode 100644 index 00000000..bc24c466 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/errors.py @@ -0,0 +1,30 @@ +__all__ = ('HttpParserError', + 'HttpParserCallbackError', + 'HttpParserInvalidStatusError', + 'HttpParserInvalidMethodError', + 'HttpParserInvalidURLError', + 'HttpParserUpgrade') + + +class HttpParserError(Exception): + pass + + +class HttpParserCallbackError(HttpParserError): + pass + + +class HttpParserInvalidStatusError(HttpParserError): + pass + + +class HttpParserInvalidMethodError(HttpParserError): + pass + + +class HttpParserInvalidURLError(HttpParserError): + pass + + +class HttpParserUpgrade(Exception): + pass diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/parser.c b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/parser.c new file mode 100644 index 00000000..6e4778ae --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/parser.c @@ -0,0 +1,10421 @@ +/* Generated by Cython 0.29.28 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "depends": [], + "extra_compile_args": [ + "-O2" + ], + "name": "httptools.parser.parser", + "sources": [ + "httptools/parser/parser.pyx" + ] + }, + "module_name": "httptools.parser.parser" +} +END: Cython Metadata */ + +#ifndef PY_SSIZE_T_CLEAN +#define PY_SSIZE_T_CLEAN +#endif /* PY_SSIZE_T_CLEAN */ +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.6+ or Python 3.3+. 
+#else +#define CYTHON_ABI "0_29_28" +#define CYTHON_HEX_VERSION 0x001D1CF0 +#define CYTHON_FUTURE_DIVISION 1 +#include <stddef.h> +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #if PY_VERSION_HEX >= 0x02070000 + #define HAVE_LONG_LONG + #endif +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + 
#define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #if PY_VERSION_HEX >= 0x030B00A4 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #elif !defined(CYTHON_FAST_THREAD_STATE) + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030B00A1) + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) + #endif + #ifndef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) + #endif + #if PY_VERSION_HEX >= 0x030B00A4 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #elif !defined(CYTHON_USE_EXC_INFO_STACK) + #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_MAJOR_VERSION < 3 + #include "longintrepr.h" + #endif + #undef SHIFT + #undef BASE + #undef MASK + #ifdef SIZEOF_VOID_P + enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; + #endif +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if 
defined(__cplusplus) + template<typename T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int32 uint32_t; + #endif + #endif +#else + #include <stdint.h> +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) && __cplusplus >= 201103L + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #elif __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__ ) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif + +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_DefaultClassType PyType_Type +#if PY_VERSION_HEX >= 0x030B00A1 + static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyObject *kwds=NULL, *argcount=NULL, *posonlyargcount=NULL, *kwonlyargcount=NULL; + PyObject *nlocals=NULL, *stacksize=NULL, *flags=NULL, *replace=NULL, *call_result=NULL, *empty=NULL; + const char *fn_cstr=NULL; + const char *name_cstr=NULL; + PyCodeObject* co=NULL; + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + if (!(kwds=PyDict_New())) goto end; + if (!(argcount=PyLong_FromLong(a))) goto end; + if (PyDict_SetItemString(kwds, "co_argcount", argcount) != 0) goto end; + if (!(posonlyargcount=PyLong_FromLong(0))) goto end; + if (PyDict_SetItemString(kwds, "co_posonlyargcount", posonlyargcount) != 0) goto end; + if (!(kwonlyargcount=PyLong_FromLong(k))) goto end; + if (PyDict_SetItemString(kwds, "co_kwonlyargcount", kwonlyargcount) != 0) goto end; + if (!(nlocals=PyLong_FromLong(l))) goto end; + if (PyDict_SetItemString(kwds, "co_nlocals", nlocals) != 0) goto end; + if 
(!(stacksize=PyLong_FromLong(s))) goto end; + if (PyDict_SetItemString(kwds, "co_stacksize", stacksize) != 0) goto end; + if (!(flags=PyLong_FromLong(f))) goto end; + if (PyDict_SetItemString(kwds, "co_flags", flags) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_code", code) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_consts", c) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_names", n) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_varnames", v) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_freevars", fv) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_cellvars", cell) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_linetable", lnos) != 0) goto end; + if (!(fn_cstr=PyUnicode_AsUTF8AndSize(fn, NULL))) goto end; + if (!(name_cstr=PyUnicode_AsUTF8AndSize(name, NULL))) goto end; + if (!(co = PyCode_NewEmpty(fn_cstr, name_cstr, fline))) goto end; + if (!(replace = PyObject_GetAttrString((PyObject*)co, "replace"))) goto cleanup_code_too; + if (!(empty = PyTuple_New(0))) goto cleanup_code_too; // unfortunately __pyx_empty_tuple isn't available here + if (!(call_result = PyObject_Call(replace, empty, kwds))) goto cleanup_code_too; + Py_XDECREF((PyObject*)co); + co = (PyCodeObject*)call_result; + call_result = NULL; + if (0) { + cleanup_code_too: + Py_XDECREF((PyObject*)co); + co = NULL; + } + end: + Py_XDECREF(kwds); + Py_XDECREF(argcount); + Py_XDECREF(posonlyargcount); + Py_XDECREF(kwonlyargcount); + Py_XDECREF(nlocals); + Py_XDECREF(stacksize); + Py_XDECREF(replace); + Py_XDECREF(call_result); + Py_XDECREF(empty); + if (type) { + PyErr_Restore(type, value, traceback); + } + return co; + } +#else + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#endif + #define __Pyx_DefaultClassType PyType_Type +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast + #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 + #define PyMem_RawMalloc(n) PyMem_Malloc(n) + #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) + #define PyMem_RawFree(p) PyMem_Free(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + 
#define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif +#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +#else +#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #if defined(PyUnicode_IS_READY) + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #else + #define __Pyx_PyUnicode_READY(op) (0) + #endif + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE) + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? 
PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) + #endif + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) + #endif +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#ifndef PyObject_Unicode + #define PyObject_Unicode PyObject_Str +#endif +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#if PY_VERSION_HEX >= 0x030900A4 + #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) +#else + #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) +#else + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include <math.h> +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + +#define __PYX_MARK_ERR_POS(f_index, lineno) \ + { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } +#define __PYX_ERR(f_index, lineno, Ln_error) \ + { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__httptools__parser__parser +#define __PYX_HAVE_API__httptools__parser__parser +/* Early includes */ +#include <string.h> +#include <stdio.h> +#include "pythread.h" +#include <stdint.h> +#include "llhttp.h" +#ifdef _OPENMP +#include <omp.h> +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include <cstdlib> + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined 
(__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ 
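+
+/* Explanatory note: `likely(x)` / `unlikely(x)` above are branch-prediction
+ * hints. On GCC > 2.95 they expand to __builtin_expect(!!(x), 1) and
+ * __builtin_expect(!!(x), 0), which keeps the expected branch on the
+ * fall-through path; on other compilers they collapse to plain (x), so the
+ * annotations cost nothing. The generated code applies them to cold error
+ * paths, as in this typical line from later in this file:
+ *
+ *     if (unlikely(__pyx_t_1)) { PyErr_NoMemory(); __PYX_ERR(0, 48, __pyx_L1_error) }
+ */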
+static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +static PyObject *__pyx_m = NULL; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_cython_runtime = NULL; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + + +static const char *__pyx_f[] = { + "httptools\\parser\\parser.pyx", + "stringsource", + "type.pxd", + "bool.pxd", + "complex.pxd", +}; + +/*--- Type declarations ---*/ +struct __pyx_obj_9httptools_6parser_6parser_HttpParser; +struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser; +struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser; + +/* "httptools/parser/parser.pyx":26 + * + * @cython.internal + * cdef class HttpParser: # <<<<<<<<<<<<<< + * + * cdef: + */ +struct __pyx_obj_9httptools_6parser_6parser_HttpParser { + PyObject_HEAD + struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *__pyx_vtab; + llhttp_t *_cparser; + llhttp_settings_t *_csettings; + PyObject *_current_header_name; + PyObject *_current_header_value; + PyObject *_proto_on_url; + PyObject *_proto_on_status; + PyObject *_proto_on_body; + PyObject *_proto_on_header; + PyObject *_proto_on_headers_complete; + PyObject *_proto_on_message_complete; + PyObject *_proto_on_chunk_header; + PyObject *_proto_on_chunk_complete; + PyObject *_proto_on_message_begin; + PyObject *_last_error; + Py_buffer py_buf; +}; + + +/* "httptools/parser/parser.pyx":215 + * + * + * cdef class HttpRequestParser(HttpParser): # <<<<<<<<<<<<<< + * + * def __init__(self, protocol): + */ +struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser { + struct __pyx_obj_9httptools_6parser_6parser_HttpParser __pyx_base; +}; + + +/* "httptools/parser/parser.pyx":229 + * + * + * cdef class HttpResponseParser(HttpParser): # <<<<<<<<<<<<<< + * + * def __init__(self, protocol): + */ +struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser { + struct __pyx_obj_9httptools_6parser_6parser_HttpParser __pyx_base; +}; + + + +/* "httptools/parser/parser.pyx":26 + * + * @cython.internal + * cdef class HttpParser: # <<<<<<<<<<<<<< + * + * cdef: + */ + +struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser { + PyObject *(*_init)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *, PyObject *, llhttp_type_t); + PyObject *(*_maybe_call_on_header)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *); + PyObject *(*_on_header_field)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *, PyObject *); + PyObject *(*_on_header_value)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *, PyObject *); + PyObject *(*_on_headers_complete)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *); + PyObject *(*_on_chunk_header)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *); + PyObject *(*_on_chunk_complete)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *); +}; +static struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *__pyx_vtabptr_9httptools_6parser_6parser_HttpParser; + + +/* "httptools/parser/parser.pyx":215 + * + * + * cdef class HttpRequestParser(HttpParser): # <<<<<<<<<<<<<< + * + * def __init__(self, protocol): + */ + +struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpRequestParser { + struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser __pyx_base; +}; +static struct 
__pyx_vtabstruct_9httptools_6parser_6parser_HttpRequestParser *__pyx_vtabptr_9httptools_6parser_6parser_HttpRequestParser; + + +/* "httptools/parser/parser.pyx":229 + * + * + * cdef class HttpResponseParser(HttpParser): # <<<<<<<<<<<<<< + * + * def __init__(self, protocol): + */ + +struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpResponseParser { + struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser __pyx_base; +}; +static struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpResponseParser *__pyx_vtabptr_9httptools_6parser_6parser_HttpResponseParser; + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* 
attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* KeywordStringCheck.proto */ +static int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed); + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* GetAttr.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); + +/* GetAttr3.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); +#else +#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif +#define __Pyx_BUILD_ASSERT_EXPR(cond)\ + (sizeof(char [1 - 2*!(cond)]) - 1) 
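+
+/* Explanatory note: __Pyx_BUILD_ASSERT_EXPR above is a C89-style static
+ * assertion: when `cond` is false, `char [1 - 2*!(cond)]` names an array of
+ * length -1, which is ill-formed, so compilation fails at the point of use.
+ * __Pxy_PyFrame_Initialize_Offsets below relies on it to verify that
+ * f_localsplus is the trailing member of PyFrameObject before deriving the
+ * locals-plus offset from PyFrame_Type.tp_basicsize. */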
+#ifndef Py_MEMBER_SIZE +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif +#if CYTHON_FAST_PYCALL + static size_t __pyx_pyframe_localsplus_offset = 0; + #include "frameobject.h" + #define __Pxy_PyFrame_Initialize_Offsets()\ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ + (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame)\ + (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif // CYTHON_FAST_PYCALL +#endif + +/* PyCFunctionFastCall.proto */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectCallNoArg.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); +#else +#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) +#endif + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* PyDictVersioning.proto */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif + +/* GetModuleGlobalName.proto */ +#if CYTHON_USE_DICT_VERSIONS +#define __Pyx_GetModuleGlobalName(var, name) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ + (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ + __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} +#define __Pyx_GetModuleGlobalNameUncached(var, name) {\ + PY_UINT64_T __pyx_dict_version;\ + PyObject *__pyx_dict_cached_value;\ + (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); +#else +#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) +#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); +#endif + +/* PyObjectCall2Args.proto */ +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* GetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* SwapException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* GetTopmostException.proto */ +#if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); +#endif + +/* SaveResetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +#else +#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) +#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) +#endif + +/* PyObjectSetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +#define __Pyx_PyObject_DelAttrStr(o,n) __Pyx_PyObject_SetAttrStr(o, n, NULL) +static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value); +#else +#define __Pyx_PyObject_DelAttrStr(o,n) PyObject_DelAttr(o,n) +#define __Pyx_PyObject_SetAttrStr(o,n,v) PyObject_SetAttr(o,n,v) +#endif + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* decode_c_string_utf16.proto */ +static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16(const char *s, Py_ssize_t size, const char *errors) { + int byteorder = 0; + return PyUnicode_DecodeUTF16(s, size, 
errors, &byteorder); +} +static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16LE(const char *s, Py_ssize_t size, const char *errors) { + int byteorder = -1; + return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); +} +static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16BE(const char *s, Py_ssize_t size, const char *errors) { + int byteorder = 1; + return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); +} + +/* decode_c_bytes.proto */ +static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes( + const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop, + const char* encoding, const char* errors, + PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)); + +/* decode_bytes.proto */ +static CYTHON_INLINE PyObject* __Pyx_decode_bytes( + PyObject* string, Py_ssize_t start, Py_ssize_t stop, + const char* encoding, const char* errors, + PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { + return __Pyx_decode_c_bytes( + PyBytes_AS_STRING(string), PyBytes_GET_SIZE(string), + start, stop, encoding, errors, decode_func); +} + +/* PyObject_GenericGetAttrNoDict.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr +#endif + +/* PyObject_GenericGetAttr.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr +#endif + +/* SetVTable.proto */ +static int __Pyx_SetVtable(PyObject *dict, void *vtable); + +/* PyObjectGetAttrStrNoError.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); + +/* SetupReduce.proto */ +static int __Pyx_setup_reduce(PyObject* type_obj); + +/* TypeImport.proto */ +#ifndef __PYX_HAVE_RT_ImportType_proto +#define __PYX_HAVE_RT_ImportType_proto +enum __Pyx_ImportType_CheckSize { + __Pyx_ImportType_CheckSize_Error = 0, + __Pyx_ImportType_CheckSize_Warn = 1, + __Pyx_ImportType_CheckSize_Ignore = 2 +}; +static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); +#endif + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* GCCDiagnostics.proto */ +#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) +#define __Pyx_HAS_GCC_DIAGNOSTIC +#endif + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint8_t(uint8_t value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_ptrdiff_t(ptrdiff_t value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint16_t(uint16_t value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + +static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__init(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_protocol, llhttp_type_t __pyx_v_mode); /* proto*/ +static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__maybe_call_on_header(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto*/ +static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_header_field(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_field); /* proto*/ +static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_header_value(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_val); /* proto*/ +static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_headers_complete(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto*/ +static PyObject 
*__pyx_f_9httptools_6parser_6parser_10HttpParser__on_chunk_header(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto*/ +static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_chunk_complete(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto*/ + +/* Module declarations from 'cpython.mem' */ + +/* Module declarations from 'cpython.version' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from 'cpython.ref' */ + +/* Module declarations from 'cpython.exc' */ + +/* Module declarations from 'cpython.module' */ + +/* Module declarations from 'cpython.tuple' */ + +/* Module declarations from 'cpython.list' */ + +/* Module declarations from 'cpython.sequence' */ + +/* Module declarations from 'cpython.mapping' */ + +/* Module declarations from 'cpython.iterator' */ + +/* Module declarations from 'cpython.number' */ + +/* Module declarations from 'cpython.int' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.bool' */ +static PyTypeObject *__pyx_ptype_7cpython_4bool_bool = 0; + +/* Module declarations from 'cpython.long' */ + +/* Module declarations from 'cpython.float' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.complex' */ +static PyTypeObject *__pyx_ptype_7cpython_7complex_complex = 0; + +/* Module declarations from 'cpython.string' */ + +/* Module declarations from 'cpython.unicode' */ + +/* Module declarations from 'cpython.dict' */ + +/* Module declarations from 'cpython.instance' */ + +/* Module declarations from 'cpython.function' */ + +/* Module declarations from 'cpython.method' */ + +/* Module declarations from 'cpython.weakref' */ + +/* Module declarations from 'cpython.getargs' */ + +/* Module declarations from 'cpython.pythread' */ + +/* Module declarations from 'cpython.pystate' */ + +/* Module declarations from 'cpython.cobject' */ + +/* Module declarations from 'cpython.oldbuffer' */ + +/* Module declarations from 'cpython.set' */ + +/* Module declarations from 'cpython.buffer' */ + +/* Module declarations from 'cpython.bytes' */ + +/* Module declarations from 'cpython.pycapsule' */ + +/* Module declarations from 'cpython' */ + +/* Module declarations from 'httptools.parser.python' */ + +/* Module declarations from 'cython' */ + +/* Module declarations from 'httptools.parser' */ + +/* Module declarations from 'libc.stdint' */ + +/* Module declarations from 'httptools.parser.cparser' */ + +/* Module declarations from 'httptools.parser.parser' */ +static PyTypeObject *__pyx_ptype_9httptools_6parser_6parser_HttpParser = 0; +static PyTypeObject *__pyx_ptype_9httptools_6parser_6parser_HttpRequestParser = 0; +static PyTypeObject *__pyx_ptype_9httptools_6parser_6parser_HttpResponseParser = 0; +static int __pyx_f_9httptools_6parser_6parser_cb_on_message_begin(llhttp_t *); /*proto*/ +static int __pyx_f_9httptools_6parser_6parser_cb_on_url(llhttp_t *, char const *, size_t); /*proto*/ +static int __pyx_f_9httptools_6parser_6parser_cb_on_status(llhttp_t *, char const *, size_t); /*proto*/ +static int __pyx_f_9httptools_6parser_6parser_cb_on_header_field(llhttp_t *, char const *, size_t); /*proto*/ +static int 
__pyx_f_9httptools_6parser_6parser_cb_on_header_value(llhttp_t *, char const *, size_t); /*proto*/ +static int __pyx_f_9httptools_6parser_6parser_cb_on_headers_complete(llhttp_t *); /*proto*/ +static int __pyx_f_9httptools_6parser_6parser_cb_on_body(llhttp_t *, char const *, size_t); /*proto*/ +static int __pyx_f_9httptools_6parser_6parser_cb_on_message_complete(llhttp_t *); /*proto*/ +static int __pyx_f_9httptools_6parser_6parser_cb_on_chunk_header(llhttp_t *); /*proto*/ +static int __pyx_f_9httptools_6parser_6parser_cb_on_chunk_complete(llhttp_t *); /*proto*/ +static PyObject *__pyx_f_9httptools_6parser_6parser_parser_error_from_errno(llhttp_t *, llhttp_errno_t); /*proto*/ +#define __Pyx_MODULE_NAME "httptools.parser.parser" +extern int __pyx_module_is_main_httptools__parser__parser; +int __pyx_module_is_main_httptools__parser__parser = 0; + +/* Implementation of 'httptools.parser.parser' */ +static PyObject *__pyx_builtin_MemoryError; +static PyObject *__pyx_builtin_TypeError; +static PyObject *__pyx_builtin_BaseException; +static const char __pyx_k_[] = "{}.{}"; +static const char __pyx_k_all[] = "__all__"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_errors[] = "errors"; +static const char __pyx_k_format[] = "format"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_on_url[] = "on_url"; +static const char __pyx_k_reduce[] = "__reduce__"; +static const char __pyx_k_context[] = "__context__"; +static const char __pyx_k_on_body[] = "on_body"; +static const char __pyx_k_getstate[] = "__getstate__"; +static const char __pyx_k_protocol[] = "protocol"; +static const char __pyx_k_setstate[] = "__setstate__"; +static const char __pyx_k_TypeError[] = "TypeError"; +static const char __pyx_k_on_header[] = "on_header"; +static const char __pyx_k_on_status[] = "on_status"; +static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; +static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; +static const char __pyx_k_MemoryError[] = "MemoryError"; +static const char __pyx_k_BaseException[] = "BaseException"; +static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; +static const char __pyx_k_HttpParserError[] = "HttpParserError"; +static const char __pyx_k_on_chunk_header[] = "on_chunk_header"; +static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; +static const char __pyx_k_on_message_begin[] = "on_message_begin"; +static const char __pyx_k_HttpParserUpgrade[] = "HttpParserUpgrade"; +static const char __pyx_k_HttpRequestParser[] = "HttpRequestParser"; +static const char __pyx_k_on_chunk_complete[] = "on_chunk_complete"; +static const char __pyx_k_HttpResponseParser[] = "HttpResponseParser"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_on_headers_complete[] = "on_headers_complete"; +static const char __pyx_k_on_message_complete[] = "on_message_complete"; +static const char __pyx_k_invalid_headers_state[] = "invalid headers state"; +static const char __pyx_k_HttpParserCallbackError[] = "HttpParserCallbackError"; +static const char __pyx_k_HttpParserInvalidURLError[] = "HttpParserInvalidURLError"; +static const char __pyx_k_HttpParserInvalidMethodError[] = "HttpParserInvalidMethodError"; +static const char __pyx_k_HttpParserInvalidStatusError[] = "HttpParserInvalidStatusError"; +static const char __pyx_k_no_default___reduce___due_to_non[] = "no default __reduce__ due to 
non-trivial __cinit__"; +static PyObject *__pyx_kp_u_; +static PyObject *__pyx_n_s_BaseException; +static PyObject *__pyx_n_s_HttpParserCallbackError; +static PyObject *__pyx_n_s_HttpParserError; +static PyObject *__pyx_n_s_HttpParserInvalidMethodError; +static PyObject *__pyx_n_s_HttpParserInvalidStatusError; +static PyObject *__pyx_n_s_HttpParserInvalidURLError; +static PyObject *__pyx_n_s_HttpParserUpgrade; +static PyObject *__pyx_n_s_HttpRequestParser; +static PyObject *__pyx_n_u_HttpRequestParser; +static PyObject *__pyx_n_s_HttpResponseParser; +static PyObject *__pyx_n_u_HttpResponseParser; +static PyObject *__pyx_n_s_MemoryError; +static PyObject *__pyx_n_s_TypeError; +static PyObject *__pyx_n_s_all; +static PyObject *__pyx_n_s_cline_in_traceback; +static PyObject *__pyx_n_s_context; +static PyObject *__pyx_n_s_errors; +static PyObject *__pyx_n_s_format; +static PyObject *__pyx_n_s_getstate; +static PyObject *__pyx_n_s_import; +static PyObject *__pyx_kp_u_invalid_headers_state; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_name; +static PyObject *__pyx_kp_s_no_default___reduce___due_to_non; +static PyObject *__pyx_n_u_on_body; +static PyObject *__pyx_n_u_on_chunk_complete; +static PyObject *__pyx_n_u_on_chunk_header; +static PyObject *__pyx_n_u_on_header; +static PyObject *__pyx_n_u_on_headers_complete; +static PyObject *__pyx_n_u_on_message_begin; +static PyObject *__pyx_n_u_on_message_complete; +static PyObject *__pyx_n_u_on_status; +static PyObject *__pyx_n_u_on_url; +static PyObject *__pyx_n_s_protocol; +static PyObject *__pyx_n_s_pyx_vtable; +static PyObject *__pyx_n_s_reduce; +static PyObject *__pyx_n_s_reduce_cython; +static PyObject *__pyx_n_s_reduce_ex; +static PyObject *__pyx_n_s_setstate; +static PyObject *__pyx_n_s_setstate_cython; +static PyObject *__pyx_n_s_test; +static int __pyx_pf_9httptools_6parser_6parser_10HttpParser___cinit__(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto */ +static void __pyx_pf_9httptools_6parser_6parser_10HttpParser_2__dealloc__(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_4get_http_version(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_6should_keep_alive(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_8should_upgrade(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_10feed_data(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_data); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_12__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_14__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */ +static int __pyx_pf_9httptools_6parser_6parser_17HttpRequestParser___init__(struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self, PyObject *__pyx_v_protocol); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_2get_method(struct 
__pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_4__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_6__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */ +static int __pyx_pf_9httptools_6parser_6parser_18HttpResponseParser___init__(struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self, PyObject *__pyx_v_protocol); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_2get_status_code(struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_4__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_6__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_tp_new_9httptools_6parser_6parser_HttpParser(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9httptools_6parser_6parser_HttpRequestParser(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_9httptools_6parser_6parser_HttpResponseParser(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tuple__2; +static PyObject *__pyx_tuple__3; +static PyObject *__pyx_tuple__4; +static PyObject *__pyx_tuple__5; +static PyObject *__pyx_tuple__6; +static PyObject *__pyx_tuple__7; +static PyObject *__pyx_tuple__8; +/* Late includes */ + +/* "httptools/parser/parser.pyx":44 + * Py_buffer py_buf + * + * def __cinit__(self): # <<<<<<<<<<<<<< + * self._cparser = \ + * PyMem_Malloc(sizeof(cparser.llhttp_t)) + */ + +/* Python wrapper */ +static int __pyx_pw_9httptools_6parser_6parser_10HttpParser_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_9httptools_6parser_6parser_10HttpParser_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); + if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;} + if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__cinit__", 0))) return -1; + __pyx_r = __pyx_pf_9httptools_6parser_6parser_10HttpParser___cinit__(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9httptools_6parser_6parser_10HttpParser___cinit__(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__cinit__", 0); + + /* "httptools/parser/parser.pyx":45 + * + * def __cinit__(self): + * self._cparser = \ # <<<<<<<<<<<<<< 
+ * PyMem_Malloc(sizeof(cparser.llhttp_t)) + * if self._cparser is NULL: + */ + __pyx_v_self->_cparser = ((llhttp_t *)PyMem_Malloc((sizeof(llhttp_t)))); + + /* "httptools/parser/parser.pyx":47 + * self._cparser = \ + * PyMem_Malloc(sizeof(cparser.llhttp_t)) + * if self._cparser is NULL: # <<<<<<<<<<<<<< + * raise MemoryError() + * + */ + __pyx_t_1 = ((__pyx_v_self->_cparser == NULL) != 0); + if (unlikely(__pyx_t_1)) { + + /* "httptools/parser/parser.pyx":48 + * PyMem_Malloc(sizeof(cparser.llhttp_t)) + * if self._cparser is NULL: + * raise MemoryError() # <<<<<<<<<<<<<< + * + * self._csettings = \ + */ + PyErr_NoMemory(); __PYX_ERR(0, 48, __pyx_L1_error) + + /* "httptools/parser/parser.pyx":47 + * self._cparser = \ + * PyMem_Malloc(sizeof(cparser.llhttp_t)) + * if self._cparser is NULL: # <<<<<<<<<<<<<< + * raise MemoryError() + * + */ + } + + /* "httptools/parser/parser.pyx":50 + * raise MemoryError() + * + * self._csettings = \ # <<<<<<<<<<<<<< + * PyMem_Malloc(sizeof(cparser.llhttp_settings_t)) + * if self._csettings is NULL: + */ + __pyx_v_self->_csettings = ((llhttp_settings_t *)PyMem_Malloc((sizeof(llhttp_settings_t)))); + + /* "httptools/parser/parser.pyx":52 + * self._csettings = \ + * PyMem_Malloc(sizeof(cparser.llhttp_settings_t)) + * if self._csettings is NULL: # <<<<<<<<<<<<<< + * raise MemoryError() + * + */ + __pyx_t_1 = ((__pyx_v_self->_csettings == NULL) != 0); + if (unlikely(__pyx_t_1)) { + + /* "httptools/parser/parser.pyx":53 + * PyMem_Malloc(sizeof(cparser.llhttp_settings_t)) + * if self._csettings is NULL: + * raise MemoryError() # <<<<<<<<<<<<<< + * + * def __dealloc__(self): + */ + PyErr_NoMemory(); __PYX_ERR(0, 53, __pyx_L1_error) + + /* "httptools/parser/parser.pyx":52 + * self._csettings = \ + * PyMem_Malloc(sizeof(cparser.llhttp_settings_t)) + * if self._csettings is NULL: # <<<<<<<<<<<<<< + * raise MemoryError() + * + */ + } + + /* "httptools/parser/parser.pyx":44 + * Py_buffer py_buf + * + * def __cinit__(self): # <<<<<<<<<<<<<< + * self._cparser = \ + * PyMem_Malloc(sizeof(cparser.llhttp_t)) + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("httptools.parser.parser.HttpParser.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":55 + * raise MemoryError() + * + * def __dealloc__(self): # <<<<<<<<<<<<<< + * PyMem_Free(self._cparser) + * PyMem_Free(self._csettings) + */ + +/* Python wrapper */ +static void __pyx_pw_9httptools_6parser_6parser_10HttpParser_3__dealloc__(PyObject *__pyx_v_self); /*proto*/ +static void __pyx_pw_9httptools_6parser_6parser_10HttpParser_3__dealloc__(PyObject *__pyx_v_self) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0); + __pyx_pf_9httptools_6parser_6parser_10HttpParser_2__dealloc__(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +static void __pyx_pf_9httptools_6parser_6parser_10HttpParser_2__dealloc__(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__dealloc__", 0); + + /* "httptools/parser/parser.pyx":56 + * + * def __dealloc__(self): + * PyMem_Free(self._cparser) # <<<<<<<<<<<<<< + * PyMem_Free(self._csettings) + * + */ + PyMem_Free(__pyx_v_self->_cparser); + + /* "httptools/parser/parser.pyx":57 + * def __dealloc__(self): + * 
PyMem_Free(self._cparser) + * PyMem_Free(self._csettings) # <<<<<<<<<<<<<< + * + * cdef _init(self, protocol, cparser.llhttp_type_t mode): + */ + PyMem_Free(__pyx_v_self->_csettings); + + /* "httptools/parser/parser.pyx":55 + * raise MemoryError() + * + * def __dealloc__(self): # <<<<<<<<<<<<<< + * PyMem_Free(self._cparser) + * PyMem_Free(self._csettings) + */ + + /* function exit code */ + __Pyx_RefNannyFinishContext(); +} + +/* "httptools/parser/parser.pyx":59 + * PyMem_Free(self._csettings) + * + * cdef _init(self, protocol, cparser.llhttp_type_t mode): # <<<<<<<<<<<<<< + * cparser.llhttp_settings_init(self._csettings) + * + */ + +static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__init(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_protocol, llhttp_type_t __pyx_v_mode) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_init", 0); + + /* "httptools/parser/parser.pyx":60 + * + * cdef _init(self, protocol, cparser.llhttp_type_t mode): + * cparser.llhttp_settings_init(self._csettings) # <<<<<<<<<<<<<< + * + * cparser.llhttp_init(self._cparser, mode, self._csettings) + */ + llhttp_settings_init(__pyx_v_self->_csettings); + + /* "httptools/parser/parser.pyx":62 + * cparser.llhttp_settings_init(self._csettings) + * + * cparser.llhttp_init(self._cparser, mode, self._csettings) # <<<<<<<<<<<<<< + * self._cparser.data = self + * + */ + llhttp_init(__pyx_v_self->_cparser, __pyx_v_mode, __pyx_v_self->_csettings); + + /* "httptools/parser/parser.pyx":63 + * + * cparser.llhttp_init(self._cparser, mode, self._csettings) + * self._cparser.data = self # <<<<<<<<<<<<<< + * + * self._current_header_name = None + */ + __pyx_v_self->_cparser->data = ((void *)__pyx_v_self); + + /* "httptools/parser/parser.pyx":65 + * self._cparser.data = self + * + * self._current_header_name = None # <<<<<<<<<<<<<< + * self._current_header_value = None + * + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_current_header_name); + __Pyx_DECREF(__pyx_v_self->_current_header_name); + __pyx_v_self->_current_header_name = ((PyObject*)Py_None); + + /* "httptools/parser/parser.pyx":66 + * + * self._current_header_name = None + * self._current_header_value = None # <<<<<<<<<<<<<< + * + * self._proto_on_header = getattr(protocol, 'on_header', None) + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_current_header_value); + __Pyx_DECREF(__pyx_v_self->_current_header_value); + __pyx_v_self->_current_header_value = ((PyObject*)Py_None); + + /* "httptools/parser/parser.pyx":68 + * self._current_header_value = None + * + * self._proto_on_header = getattr(protocol, 'on_header', None) # <<<<<<<<<<<<<< + * if self._proto_on_header is not None: + * self._csettings.on_header_field = cb_on_header_field + */ + __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_header, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 68, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_proto_on_header); + __Pyx_DECREF(__pyx_v_self->_proto_on_header); + __pyx_v_self->_proto_on_header = __pyx_t_1; + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":69 + * + * self._proto_on_header = getattr(protocol, 'on_header', None) + * if self._proto_on_header is not None: # <<<<<<<<<<<<<< + * 
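+ *
+ * _init() probes the protocol object with getattr(..., None) for each
+ * optional hook and installs the corresponding C-level llhttp callback
+ * only when the method actually exists, so a protocol pays nothing for
+ * hooks it does not implement. An illustrative sketch of the
+ * Python-side contract this serves (the class name is hypothetical;
+ * only the methods you define get dispatched):
+ *
+ *     import httptools
+ *
+ *     class Proto:
+ *         def on_header(self, name: bytes, value: bytes):
+ *             print(name, value)        # e.g. b'Host' b'example.org'
+ *         def on_headers_complete(self):
+ *             print('headers done')
+ *
+ *     p = httptools.HttpRequestParser(Proto())
+ *     p.feed_data(b'GET / HTTP/1.1\r\nHost: example.org\r\n\r\n')
+ *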
self._csettings.on_header_field = cb_on_header_field + * self._csettings.on_header_value = cb_on_header_value + */ + __pyx_t_2 = (__pyx_v_self->_proto_on_header != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "httptools/parser/parser.pyx":70 + * self._proto_on_header = getattr(protocol, 'on_header', None) + * if self._proto_on_header is not None: + * self._csettings.on_header_field = cb_on_header_field # <<<<<<<<<<<<<< + * self._csettings.on_header_value = cb_on_header_value + * self._proto_on_headers_complete = getattr( + */ + __pyx_v_self->_csettings->on_header_field = __pyx_f_9httptools_6parser_6parser_cb_on_header_field; + + /* "httptools/parser/parser.pyx":71 + * if self._proto_on_header is not None: + * self._csettings.on_header_field = cb_on_header_field + * self._csettings.on_header_value = cb_on_header_value # <<<<<<<<<<<<<< + * self._proto_on_headers_complete = getattr( + * protocol, 'on_headers_complete', None) + */ + __pyx_v_self->_csettings->on_header_value = __pyx_f_9httptools_6parser_6parser_cb_on_header_value; + + /* "httptools/parser/parser.pyx":69 + * + * self._proto_on_header = getattr(protocol, 'on_header', None) + * if self._proto_on_header is not None: # <<<<<<<<<<<<<< + * self._csettings.on_header_field = cb_on_header_field + * self._csettings.on_header_value = cb_on_header_value + */ + } + + /* "httptools/parser/parser.pyx":72 + * self._csettings.on_header_field = cb_on_header_field + * self._csettings.on_header_value = cb_on_header_value + * self._proto_on_headers_complete = getattr( # <<<<<<<<<<<<<< + * protocol, 'on_headers_complete', None) + * self._csettings.on_headers_complete = cb_on_headers_complete + */ + __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_headers_complete, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 72, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_proto_on_headers_complete); + __Pyx_DECREF(__pyx_v_self->_proto_on_headers_complete); + __pyx_v_self->_proto_on_headers_complete = __pyx_t_1; + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":74 + * self._proto_on_headers_complete = getattr( + * protocol, 'on_headers_complete', None) + * self._csettings.on_headers_complete = cb_on_headers_complete # <<<<<<<<<<<<<< + * + * self._proto_on_body = getattr(protocol, 'on_body', None) + */ + __pyx_v_self->_csettings->on_headers_complete = __pyx_f_9httptools_6parser_6parser_cb_on_headers_complete; + + /* "httptools/parser/parser.pyx":76 + * self._csettings.on_headers_complete = cb_on_headers_complete + * + * self._proto_on_body = getattr(protocol, 'on_body', None) # <<<<<<<<<<<<<< + * if self._proto_on_body is not None: + * self._csettings.on_body = cb_on_body + */ + __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_body, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 76, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_proto_on_body); + __Pyx_DECREF(__pyx_v_self->_proto_on_body); + __pyx_v_self->_proto_on_body = __pyx_t_1; + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":77 + * + * self._proto_on_body = getattr(protocol, 'on_body', None) + * if self._proto_on_body is not None: # <<<<<<<<<<<<<< + * self._csettings.on_body = cb_on_body + * + */ + __pyx_t_3 = (__pyx_v_self->_proto_on_body != Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "httptools/parser/parser.pyx":78 + * self._proto_on_body = getattr(protocol, 'on_body', None) + * if self._proto_on_body is not None: + * 
self._csettings.on_body = cb_on_body # <<<<<<<<<<<<<< + * + * self._proto_on_message_begin = getattr( + */ + __pyx_v_self->_csettings->on_body = __pyx_f_9httptools_6parser_6parser_cb_on_body; + + /* "httptools/parser/parser.pyx":77 + * + * self._proto_on_body = getattr(protocol, 'on_body', None) + * if self._proto_on_body is not None: # <<<<<<<<<<<<<< + * self._csettings.on_body = cb_on_body + * + */ + } + + /* "httptools/parser/parser.pyx":80 + * self._csettings.on_body = cb_on_body + * + * self._proto_on_message_begin = getattr( # <<<<<<<<<<<<<< + * protocol, 'on_message_begin', None) + * if self._proto_on_message_begin is not None: + */ + __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_message_begin, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 80, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_proto_on_message_begin); + __Pyx_DECREF(__pyx_v_self->_proto_on_message_begin); + __pyx_v_self->_proto_on_message_begin = __pyx_t_1; + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":82 + * self._proto_on_message_begin = getattr( + * protocol, 'on_message_begin', None) + * if self._proto_on_message_begin is not None: # <<<<<<<<<<<<<< + * self._csettings.on_message_begin = cb_on_message_begin + * + */ + __pyx_t_2 = (__pyx_v_self->_proto_on_message_begin != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "httptools/parser/parser.pyx":83 + * protocol, 'on_message_begin', None) + * if self._proto_on_message_begin is not None: + * self._csettings.on_message_begin = cb_on_message_begin # <<<<<<<<<<<<<< + * + * self._proto_on_message_complete = getattr( + */ + __pyx_v_self->_csettings->on_message_begin = __pyx_f_9httptools_6parser_6parser_cb_on_message_begin; + + /* "httptools/parser/parser.pyx":82 + * self._proto_on_message_begin = getattr( + * protocol, 'on_message_begin', None) + * if self._proto_on_message_begin is not None: # <<<<<<<<<<<<<< + * self._csettings.on_message_begin = cb_on_message_begin + * + */ + } + + /* "httptools/parser/parser.pyx":85 + * self._csettings.on_message_begin = cb_on_message_begin + * + * self._proto_on_message_complete = getattr( # <<<<<<<<<<<<<< + * protocol, 'on_message_complete', None) + * if self._proto_on_message_complete is not None: + */ + __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_message_complete, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 85, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_proto_on_message_complete); + __Pyx_DECREF(__pyx_v_self->_proto_on_message_complete); + __pyx_v_self->_proto_on_message_complete = __pyx_t_1; + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":87 + * self._proto_on_message_complete = getattr( + * protocol, 'on_message_complete', None) + * if self._proto_on_message_complete is not None: # <<<<<<<<<<<<<< + * self._csettings.on_message_complete = cb_on_message_complete + * + */ + __pyx_t_3 = (__pyx_v_self->_proto_on_message_complete != Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "httptools/parser/parser.pyx":88 + * protocol, 'on_message_complete', None) + * if self._proto_on_message_complete is not None: + * self._csettings.on_message_complete = cb_on_message_complete # <<<<<<<<<<<<<< + * + * self._proto_on_chunk_header = getattr( + */ + __pyx_v_self->_csettings->on_message_complete = __pyx_f_9httptools_6parser_6parser_cb_on_message_complete; + + /* "httptools/parser/parser.pyx":87 + * self._proto_on_message_complete = getattr( + * 
protocol, 'on_message_complete', None) + * if self._proto_on_message_complete is not None: # <<<<<<<<<<<<<< + * self._csettings.on_message_complete = cb_on_message_complete + * + */ + } + + /* "httptools/parser/parser.pyx":90 + * self._csettings.on_message_complete = cb_on_message_complete + * + * self._proto_on_chunk_header = getattr( # <<<<<<<<<<<<<< + * protocol, 'on_chunk_header', None) + * self._csettings.on_chunk_header = cb_on_chunk_header + */ + __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_chunk_header, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 90, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_proto_on_chunk_header); + __Pyx_DECREF(__pyx_v_self->_proto_on_chunk_header); + __pyx_v_self->_proto_on_chunk_header = __pyx_t_1; + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":92 + * self._proto_on_chunk_header = getattr( + * protocol, 'on_chunk_header', None) + * self._csettings.on_chunk_header = cb_on_chunk_header # <<<<<<<<<<<<<< + * + * self._proto_on_chunk_complete = getattr( + */ + __pyx_v_self->_csettings->on_chunk_header = __pyx_f_9httptools_6parser_6parser_cb_on_chunk_header; + + /* "httptools/parser/parser.pyx":94 + * self._csettings.on_chunk_header = cb_on_chunk_header + * + * self._proto_on_chunk_complete = getattr( # <<<<<<<<<<<<<< + * protocol, 'on_chunk_complete', None) + * self._csettings.on_chunk_complete = cb_on_chunk_complete + */ + __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_chunk_complete, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 94, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_proto_on_chunk_complete); + __Pyx_DECREF(__pyx_v_self->_proto_on_chunk_complete); + __pyx_v_self->_proto_on_chunk_complete = __pyx_t_1; + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":96 + * self._proto_on_chunk_complete = getattr( + * protocol, 'on_chunk_complete', None) + * self._csettings.on_chunk_complete = cb_on_chunk_complete # <<<<<<<<<<<<<< + * + * self._last_error = None + */ + __pyx_v_self->_csettings->on_chunk_complete = __pyx_f_9httptools_6parser_6parser_cb_on_chunk_complete; + + /* "httptools/parser/parser.pyx":98 + * self._csettings.on_chunk_complete = cb_on_chunk_complete + * + * self._last_error = None # <<<<<<<<<<<<<< + * + * cdef _maybe_call_on_header(self): + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_last_error); + __Pyx_DECREF(__pyx_v_self->_last_error); + __pyx_v_self->_last_error = Py_None; + + /* "httptools/parser/parser.pyx":59 + * PyMem_Free(self._csettings) + * + * cdef _init(self, protocol, cparser.llhttp_type_t mode): # <<<<<<<<<<<<<< + * cparser.llhttp_settings_init(self._csettings) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("httptools.parser.parser.HttpParser._init", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":100 + * self._last_error = None + * + * cdef _maybe_call_on_header(self): # <<<<<<<<<<<<<< + * if self._current_header_value is not None: + * current_header_name = self._current_header_name + */ + +static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__maybe_call_on_header(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) { + PyObject 
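+ /* _maybe_call_on_header() is the flush point for the (name, value)
+  * pair buffered by _on_header_field/_on_header_value: once a value
+  * has been collected it resets both slots to None and, if the
+  * protocol defined on_header, delivers exactly one complete pair per
+  * header line. */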
*__pyx_v_current_header_name = NULL; + PyObject *__pyx_v_current_header_value = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_maybe_call_on_header", 0); + + /* "httptools/parser/parser.pyx":101 + * + * cdef _maybe_call_on_header(self): + * if self._current_header_value is not None: # <<<<<<<<<<<<<< + * current_header_name = self._current_header_name + * current_header_value = self._current_header_value + */ + __pyx_t_1 = (__pyx_v_self->_current_header_value != ((PyObject*)Py_None)); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "httptools/parser/parser.pyx":102 + * cdef _maybe_call_on_header(self): + * if self._current_header_value is not None: + * current_header_name = self._current_header_name # <<<<<<<<<<<<<< + * current_header_value = self._current_header_value + * + */ + __pyx_t_3 = __pyx_v_self->_current_header_name; + __Pyx_INCREF(__pyx_t_3); + __pyx_v_current_header_name = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + + /* "httptools/parser/parser.pyx":103 + * if self._current_header_value is not None: + * current_header_name = self._current_header_name + * current_header_value = self._current_header_value # <<<<<<<<<<<<<< + * + * self._current_header_name = self._current_header_value = None + */ + __pyx_t_3 = __pyx_v_self->_current_header_value; + __Pyx_INCREF(__pyx_t_3); + __pyx_v_current_header_value = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + + /* "httptools/parser/parser.pyx":105 + * current_header_value = self._current_header_value + * + * self._current_header_name = self._current_header_value = None # <<<<<<<<<<<<<< + * + * if self._proto_on_header is not None: + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_current_header_name); + __Pyx_DECREF(__pyx_v_self->_current_header_name); + __pyx_v_self->_current_header_name = ((PyObject*)Py_None); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_current_header_value); + __Pyx_DECREF(__pyx_v_self->_current_header_value); + __pyx_v_self->_current_header_value = ((PyObject*)Py_None); + + /* "httptools/parser/parser.pyx":107 + * self._current_header_name = self._current_header_value = None + * + * if self._proto_on_header is not None: # <<<<<<<<<<<<<< + * self._proto_on_header(current_header_name, + * current_header_value) + */ + __pyx_t_2 = (__pyx_v_self->_proto_on_header != Py_None); + __pyx_t_1 = (__pyx_t_2 != 0); + if (__pyx_t_1) { + + /* "httptools/parser/parser.pyx":109 + * if self._proto_on_header is not None: + * self._proto_on_header(current_header_name, + * current_header_value) # <<<<<<<<<<<<<< + * + * cdef _on_header_field(self, bytes field): + */ + __Pyx_INCREF(__pyx_v_self->_proto_on_header); + __pyx_t_4 = __pyx_v_self->_proto_on_header; __pyx_t_5 = NULL; + __pyx_t_6 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_6 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_v_current_header_name, __pyx_v_current_header_value}; + 
__pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 108, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_v_current_header_name, __pyx_v_current_header_value}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 2+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 108, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_7 = PyTuple_New(2+__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 108, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_5) { + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_5); __pyx_t_5 = NULL; + } + __Pyx_INCREF(__pyx_v_current_header_name); + __Pyx_GIVEREF(__pyx_v_current_header_name); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_6, __pyx_v_current_header_name); + __Pyx_INCREF(__pyx_v_current_header_value); + __Pyx_GIVEREF(__pyx_v_current_header_value); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_6, __pyx_v_current_header_value); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 108, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "httptools/parser/parser.pyx":107 + * self._current_header_name = self._current_header_value = None + * + * if self._proto_on_header is not None: # <<<<<<<<<<<<<< + * self._proto_on_header(current_header_name, + * current_header_value) + */ + } + + /* "httptools/parser/parser.pyx":101 + * + * cdef _maybe_call_on_header(self): + * if self._current_header_value is not None: # <<<<<<<<<<<<<< + * current_header_name = self._current_header_name + * current_header_value = self._current_header_value + */ + } + + /* "httptools/parser/parser.pyx":100 + * self._last_error = None + * + * cdef _maybe_call_on_header(self): # <<<<<<<<<<<<<< + * if self._current_header_value is not None: + * current_header_name = self._current_header_name + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("httptools.parser.parser.HttpParser._maybe_call_on_header", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_current_header_name); + __Pyx_XDECREF(__pyx_v_current_header_value); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":111 + * current_header_value) + * + * cdef _on_header_field(self, bytes field): # <<<<<<<<<<<<<< + * self._maybe_call_on_header() + * if self._current_header_name is None: + */ + +static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_header_field(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_field) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_on_header_field", 0); + + /* "httptools/parser/parser.pyx":112 + * + * cdef _on_header_field(self, bytes field): + * 
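+ *
+ * llhttp may report one header name or value across several
+ * on_header_field/on_header_value C callbacks when the input arrives
+ * in fragments; the += branches below stitch the pieces back together,
+ * so the protocol still sees a single on_header call per header. An
+ * illustrative sketch, with a parser/protocol pair as in the earlier
+ * example:
+ *
+ *     for chunk in (b'GET / HTTP/1.1\r\nHos', b't: exa', b'mple\r\n\r\n'):
+ *         p.feed_data(chunk)
+ *     # on_header fires once, with (b'Host', b'example')
+ *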
self._maybe_call_on_header() # <<<<<<<<<<<<<< + * if self._current_header_name is None: + * self._current_header_name = field + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_maybe_call_on_header(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 112, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":113 + * cdef _on_header_field(self, bytes field): + * self._maybe_call_on_header() + * if self._current_header_name is None: # <<<<<<<<<<<<<< + * self._current_header_name = field + * else: + */ + __pyx_t_2 = (__pyx_v_self->_current_header_name == ((PyObject*)Py_None)); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "httptools/parser/parser.pyx":114 + * self._maybe_call_on_header() + * if self._current_header_name is None: + * self._current_header_name = field # <<<<<<<<<<<<<< + * else: + * self._current_header_name += field + */ + __Pyx_INCREF(__pyx_v_field); + __Pyx_GIVEREF(__pyx_v_field); + __Pyx_GOTREF(__pyx_v_self->_current_header_name); + __Pyx_DECREF(__pyx_v_self->_current_header_name); + __pyx_v_self->_current_header_name = __pyx_v_field; + + /* "httptools/parser/parser.pyx":113 + * cdef _on_header_field(self, bytes field): + * self._maybe_call_on_header() + * if self._current_header_name is None: # <<<<<<<<<<<<<< + * self._current_header_name = field + * else: + */ + goto __pyx_L3; + } + + /* "httptools/parser/parser.pyx":116 + * self._current_header_name = field + * else: + * self._current_header_name += field # <<<<<<<<<<<<<< + * + * cdef _on_header_value(self, bytes val): + */ + /*else*/ { + __pyx_t_1 = PyNumber_InPlaceAdd(__pyx_v_self->_current_header_name, __pyx_v_field); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 116, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->_current_header_name); + __Pyx_DECREF(__pyx_v_self->_current_header_name); + __pyx_v_self->_current_header_name = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + } + __pyx_L3:; + + /* "httptools/parser/parser.pyx":111 + * current_header_value) + * + * cdef _on_header_field(self, bytes field): # <<<<<<<<<<<<<< + * self._maybe_call_on_header() + * if self._current_header_name is None: + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("httptools.parser.parser.HttpParser._on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":118 + * self._current_header_name += field + * + * cdef _on_header_value(self, bytes val): # <<<<<<<<<<<<<< + * if self._current_header_value is None: + * self._current_header_value = val + */ + +static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_header_value(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_val) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_on_header_value", 0); + + /* "httptools/parser/parser.pyx":119 + * + * cdef _on_header_value(self, bytes val): + * if self._current_header_value is None: # <<<<<<<<<<<<<< + * self._current_header_value = val + * else: + */ + __pyx_t_1 = 
(__pyx_v_self->_current_header_value == ((PyObject*)Py_None)); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "httptools/parser/parser.pyx":120 + * cdef _on_header_value(self, bytes val): + * if self._current_header_value is None: + * self._current_header_value = val # <<<<<<<<<<<<<< + * else: + * # This is unlikely, as mostly HTTP headers are one-line + */ + __Pyx_INCREF(__pyx_v_val); + __Pyx_GIVEREF(__pyx_v_val); + __Pyx_GOTREF(__pyx_v_self->_current_header_value); + __Pyx_DECREF(__pyx_v_self->_current_header_value); + __pyx_v_self->_current_header_value = __pyx_v_val; + + /* "httptools/parser/parser.pyx":119 + * + * cdef _on_header_value(self, bytes val): + * if self._current_header_value is None: # <<<<<<<<<<<<<< + * self._current_header_value = val + * else: + */ + goto __pyx_L3; + } + + /* "httptools/parser/parser.pyx":123 + * else: + * # This is unlikely, as mostly HTTP headers are one-line + * self._current_header_value += val # <<<<<<<<<<<<<< + * + * cdef _on_headers_complete(self): + */ + /*else*/ { + __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_self->_current_header_value, __pyx_v_val); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_v_self->_current_header_value); + __Pyx_DECREF(__pyx_v_self->_current_header_value); + __pyx_v_self->_current_header_value = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + } + __pyx_L3:; + + /* "httptools/parser/parser.pyx":118 + * self._current_header_name += field + * + * cdef _on_header_value(self, bytes val): # <<<<<<<<<<<<<< + * if self._current_header_value is None: + * self._current_header_value = val + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_AddTraceback("httptools.parser.parser.HttpParser._on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":125 + * self._current_header_value += val + * + * cdef _on_headers_complete(self): # <<<<<<<<<<<<<< + * self._maybe_call_on_header() + * + */ + +static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_headers_complete(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_on_headers_complete", 0); + + /* "httptools/parser/parser.pyx":126 + * + * cdef _on_headers_complete(self): + * self._maybe_call_on_header() # <<<<<<<<<<<<<< + * + * if self._proto_on_headers_complete is not None: + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_maybe_call_on_header(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 126, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":128 + * self._maybe_call_on_header() + * + * if self._proto_on_headers_complete is not None: # <<<<<<<<<<<<<< + * self._proto_on_headers_complete() + * + */ + __pyx_t_2 = (__pyx_v_self->_proto_on_headers_complete != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "httptools/parser/parser.pyx":129 + * + * if 
self._proto_on_headers_complete is not None: + * self._proto_on_headers_complete() # <<<<<<<<<<<<<< + * + * cdef _on_chunk_header(self): + */ + __Pyx_INCREF(__pyx_v_self->_proto_on_headers_complete); + __pyx_t_4 = __pyx_v_self->_proto_on_headers_complete; __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_1 = (__pyx_t_5) ? __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5) : __Pyx_PyObject_CallNoArg(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":128 + * self._maybe_call_on_header() + * + * if self._proto_on_headers_complete is not None: # <<<<<<<<<<<<<< + * self._proto_on_headers_complete() + * + */ + } + + /* "httptools/parser/parser.pyx":125 + * self._current_header_value += val + * + * cdef _on_headers_complete(self): # <<<<<<<<<<<<<< + * self._maybe_call_on_header() + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("httptools.parser.parser.HttpParser._on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":131 + * self._proto_on_headers_complete() + * + * cdef _on_chunk_header(self): # <<<<<<<<<<<<<< + * if (self._current_header_value is not None or + * self._current_header_name is not None): + */ + +static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_chunk_header(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_on_chunk_header", 0); + + /* "httptools/parser/parser.pyx":132 + * + * cdef _on_chunk_header(self): + * if (self._current_header_value is not None or # <<<<<<<<<<<<<< + * self._current_header_name is not None): + * raise HttpParserError('invalid headers state') + */ + __pyx_t_2 = (__pyx_v_self->_current_header_value != ((PyObject*)Py_None)); + __pyx_t_3 = (__pyx_t_2 != 0); + if (!__pyx_t_3) { + } else { + __pyx_t_1 = __pyx_t_3; + goto __pyx_L4_bool_binop_done; + } + + /* "httptools/parser/parser.pyx":133 + * cdef _on_chunk_header(self): + * if (self._current_header_value is not None or + * self._current_header_name is not None): # <<<<<<<<<<<<<< + * raise HttpParserError('invalid headers state') + * + */ + __pyx_t_3 = (__pyx_v_self->_current_header_name != ((PyObject*)Py_None)); + __pyx_t_2 = (__pyx_t_3 != 0); + __pyx_t_1 = __pyx_t_2; + __pyx_L4_bool_binop_done:; + + /* "httptools/parser/parser.pyx":132 + * + * cdef _on_chunk_header(self): + * if (self._current_header_value is not None or # <<<<<<<<<<<<<< + * self._current_header_name is not None): + * raise HttpParserError('invalid headers state') + */ + if 
(unlikely(__pyx_t_1)) { + + /* "httptools/parser/parser.pyx":134 + * if (self._current_header_value is not None or + * self._current_header_name is not None): + * raise HttpParserError('invalid headers state') # <<<<<<<<<<<<<< + * + * if self._proto_on_chunk_header is not None: + */ + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_HttpParserError); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 134, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + } + } + __pyx_t_4 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_6, __pyx_kp_u_invalid_headers_state) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_kp_u_invalid_headers_state); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 134, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(0, 134, __pyx_L1_error) + + /* "httptools/parser/parser.pyx":132 + * + * cdef _on_chunk_header(self): + * if (self._current_header_value is not None or # <<<<<<<<<<<<<< + * self._current_header_name is not None): + * raise HttpParserError('invalid headers state') + */ + } + + /* "httptools/parser/parser.pyx":136 + * raise HttpParserError('invalid headers state') + * + * if self._proto_on_chunk_header is not None: # <<<<<<<<<<<<<< + * self._proto_on_chunk_header() + * + */ + __pyx_t_1 = (__pyx_v_self->_proto_on_chunk_header != Py_None); + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "httptools/parser/parser.pyx":137 + * + * if self._proto_on_chunk_header is not None: + * self._proto_on_chunk_header() # <<<<<<<<<<<<<< + * + * cdef _on_chunk_complete(self): + */ + __Pyx_INCREF(__pyx_v_self->_proto_on_chunk_header); + __pyx_t_5 = __pyx_v_self->_proto_on_chunk_header; __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + } + } + __pyx_t_4 = (__pyx_t_6) ? 
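+ /* Chunked transfer-encoding: llhttp invokes on_chunk_header at each
+  * chunk-size line and on_chunk_complete after the chunk's trailing
+  * CRLF; a partially buffered header at a chunk boundary indicates
+  * corrupted parser state, hence the 'invalid headers state' error
+  * raised above. */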
__Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_6) : __Pyx_PyObject_CallNoArg(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 137, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "httptools/parser/parser.pyx":136 + * raise HttpParserError('invalid headers state') + * + * if self._proto_on_chunk_header is not None: # <<<<<<<<<<<<<< + * self._proto_on_chunk_header() + * + */ + } + + /* "httptools/parser/parser.pyx":131 + * self._proto_on_headers_complete() + * + * cdef _on_chunk_header(self): # <<<<<<<<<<<<<< + * if (self._current_header_value is not None or + * self._current_header_name is not None): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("httptools.parser.parser.HttpParser._on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":139 + * self._proto_on_chunk_header() + * + * cdef _on_chunk_complete(self): # <<<<<<<<<<<<<< + * self._maybe_call_on_header() + * + */ + +static PyObject *__pyx_f_9httptools_6parser_6parser_10HttpParser__on_chunk_complete(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("_on_chunk_complete", 0); + + /* "httptools/parser/parser.pyx":140 + * + * cdef _on_chunk_complete(self): + * self._maybe_call_on_header() # <<<<<<<<<<<<<< + * + * if self._proto_on_chunk_complete is not None: + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_maybe_call_on_header(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 140, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":142 + * self._maybe_call_on_header() + * + * if self._proto_on_chunk_complete is not None: # <<<<<<<<<<<<<< + * self._proto_on_chunk_complete() + * + */ + __pyx_t_2 = (__pyx_v_self->_proto_on_chunk_complete != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "httptools/parser/parser.pyx":143 + * + * if self._proto_on_chunk_complete is not None: + * self._proto_on_chunk_complete() # <<<<<<<<<<<<<< + * + * ### Public API ### + */ + __Pyx_INCREF(__pyx_v_self->_proto_on_chunk_complete); + __pyx_t_4 = __pyx_v_self->_proto_on_chunk_complete; __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_1 = (__pyx_t_5) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5) : __Pyx_PyObject_CallNoArg(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 143, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":142 + * self._maybe_call_on_header() + * + * if self._proto_on_chunk_complete is not None: # <<<<<<<<<<<<<< + * self._proto_on_chunk_complete() + * + */ + } + + /* "httptools/parser/parser.pyx":139 + * self._proto_on_chunk_header() + * + * cdef _on_chunk_complete(self): # <<<<<<<<<<<<<< + * self._maybe_call_on_header() + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("httptools.parser.parser.HttpParser._on_chunk_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":147 + * ### Public API ### + * + * def get_http_version(self): # <<<<<<<<<<<<<< + * cdef cparser.llhttp_t* parser = self._cparser + * return '{}.{}'.format(parser.http_major, parser.http_minor) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_5get_http_version(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_5get_http_version(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_http_version (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_6parser_10HttpParser_4get_http_version(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_4get_http_version(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) { + llhttp_t *__pyx_v_parser; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + llhttp_t *__pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_http_version", 0); + + /* "httptools/parser/parser.pyx":148 + * + * def get_http_version(self): + * cdef cparser.llhttp_t* parser = self._cparser # <<<<<<<<<<<<<< + * return '{}.{}'.format(parser.http_major, parser.http_minor) + * + */ + __pyx_t_1 = __pyx_v_self->_cparser; + __pyx_v_parser = __pyx_t_1; + + /* "httptools/parser/parser.pyx":149 + * def get_http_version(self): + * cdef cparser.llhttp_t* parser = self._cparser + * return '{}.{}'.format(parser.http_major, parser.http_minor) # <<<<<<<<<<<<<< + * + * def should_keep_alive(self): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_kp_u_, __pyx_n_s_format); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 149, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyInt_From_uint8_t(__pyx_v_parser->http_major); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 149, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyInt_From_uint8_t(__pyx_v_parser->http_minor); if 
(unlikely(!__pyx_t_5)) __PYX_ERR(0, 149, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = NULL; + __pyx_t_7 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_7 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_t_4, __pyx_t_5}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 149, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_t_4, __pyx_t_5}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 149, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + { + __pyx_t_8 = PyTuple_New(2+__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 149, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_6) { + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_6); __pyx_t_6 = NULL; + } + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_7, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_7, __pyx_t_5); + __pyx_t_4 = 0; + __pyx_t_5 = 0; + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_8, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 149, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "httptools/parser/parser.pyx":147 + * ### Public API ### + * + * def get_http_version(self): # <<<<<<<<<<<<<< + * cdef cparser.llhttp_t* parser = self._cparser + * return '{}.{}'.format(parser.http_major, parser.http_minor) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("httptools.parser.parser.HttpParser.get_http_version", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":151 + * return '{}.{}'.format(parser.http_major, parser.http_minor) + * + * def should_keep_alive(self): # <<<<<<<<<<<<<< + * return bool(cparser.llhttp_should_keep_alive(self._cparser)) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_7should_keep_alive(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_7should_keep_alive(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("should_keep_alive (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_6parser_10HttpParser_6should_keep_alive(((struct 
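+ /* The public getters here are thin views over llhttp state:
+  * get_http_version() formats http_major/http_minor,
+  * should_keep_alive() wraps llhttp_should_keep_alive(), and
+  * should_upgrade() reads the parser's upgrade flag. Read from Python
+  * (values depend on the message just fed):
+  *
+  *     p.get_http_version()    # e.g. '1.1'
+  *     p.should_keep_alive()   # e.g. True
+  *     p.should_upgrade()      # True only after an Upgrade request
+  */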
__pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_6should_keep_alive(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("should_keep_alive", 0); + + /* "httptools/parser/parser.pyx":152 + * + * def should_keep_alive(self): + * return bool(cparser.llhttp_should_keep_alive(self._cparser)) # <<<<<<<<<<<<<< + * + * def should_upgrade(self): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(llhttp_should_keep_alive(__pyx_v_self->_cparser)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 152, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 152, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyBool_FromLong((!(!__pyx_t_2))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 152, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "httptools/parser/parser.pyx":151 + * return '{}.{}'.format(parser.http_major, parser.http_minor) + * + * def should_keep_alive(self): # <<<<<<<<<<<<<< + * return bool(cparser.llhttp_should_keep_alive(self._cparser)) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("httptools.parser.parser.HttpParser.should_keep_alive", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":154 + * return bool(cparser.llhttp_should_keep_alive(self._cparser)) + * + * def should_upgrade(self): # <<<<<<<<<<<<<< + * cdef cparser.llhttp_t* parser = self._cparser + * return bool(parser.upgrade) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_9should_upgrade(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_9should_upgrade(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("should_upgrade (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_6parser_10HttpParser_8should_upgrade(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_8should_upgrade(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) { + llhttp_t *__pyx_v_parser; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + llhttp_t *__pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("should_upgrade", 0); + + /* "httptools/parser/parser.pyx":155 + * + * def should_upgrade(self): + * cdef cparser.llhttp_t* parser = self._cparser # <<<<<<<<<<<<<< + * return bool(parser.upgrade) + * + */ + __pyx_t_1 = __pyx_v_self->_cparser; + __pyx_v_parser = __pyx_t_1; + + /* "httptools/parser/parser.pyx":156 + * def should_upgrade(self): + * cdef cparser.llhttp_t* 
parser = self._cparser + * return bool(parser.upgrade) # <<<<<<<<<<<<<< + * + * def feed_data(self, data): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyInt_From_uint8_t(__pyx_v_parser->upgrade); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 156, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 156, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyBool_FromLong((!(!__pyx_t_3))); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 156, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "httptools/parser/parser.pyx":154 + * return bool(cparser.llhttp_should_keep_alive(self._cparser)) + * + * def should_upgrade(self): # <<<<<<<<<<<<<< + * cdef cparser.llhttp_t* parser = self._cparser + * return bool(parser.upgrade) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("httptools.parser.parser.HttpParser.should_upgrade", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":158 + * return bool(parser.upgrade) + * + * def feed_data(self, data): # <<<<<<<<<<<<<< + * cdef: + * size_t data_len + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_11feed_data(PyObject *__pyx_v_self, PyObject *__pyx_v_data); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_11feed_data(PyObject *__pyx_v_self, PyObject *__pyx_v_data) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("feed_data (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_6parser_10HttpParser_10feed_data(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self), ((PyObject *)__pyx_v_data)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_10feed_data(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_data) { + size_t __pyx_v_data_len; + llhttp_errno_t __pyx_v_err; + Py_buffer *__pyx_v_buf; + int __pyx_v_owning_buf; + char *__pyx_v_err_pos; + PyObject *__pyx_v_ex = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + char const *__pyx_t_9; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + PyObject *__pyx_t_15 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("feed_data", 0); + + /* "httptools/parser/parser.pyx":163 + * cparser.llhttp_errno_t err + * Py_buffer *buf + * bint owning_buf = False # <<<<<<<<<<<<<< + * char* err_pos + * + */ + __pyx_v_owning_buf = 0; + + /* "httptools/parser/parser.pyx":166 + * char* err_pos + * + * if PyMemoryView_Check(data): # <<<<<<<<<<<<<< + * buf = PyMemoryView_GET_BUFFER(data) + * data_len = buf.len + */ + __pyx_t_1 = (PyMemoryView_Check(__pyx_v_data) != 0); + if (__pyx_t_1) { + + /* "httptools/parser/parser.pyx":167 + * + * if PyMemoryView_Check(data): + * buf = PyMemoryView_GET_BUFFER(data) # <<<<<<<<<<<<<< + * data_len = buf.len + * err = 
cparser.llhttp_execute( + */ + __pyx_v_buf = PyMemoryView_GET_BUFFER(__pyx_v_data); + + /* "httptools/parser/parser.pyx":168 + * if PyMemoryView_Check(data): + * buf = PyMemoryView_GET_BUFFER(data) + * data_len = buf.len # <<<<<<<<<<<<<< + * err = cparser.llhttp_execute( + * self._cparser, + */ + __pyx_v_data_len = ((size_t)__pyx_v_buf->len); + + /* "httptools/parser/parser.pyx":169 + * buf = PyMemoryView_GET_BUFFER(data) + * data_len = buf.len + * err = cparser.llhttp_execute( # <<<<<<<<<<<<<< + * self._cparser, + * buf.buf, + */ + __pyx_v_err = llhttp_execute(__pyx_v_self->_cparser, ((char *)__pyx_v_buf->buf), __pyx_v_data_len); + + /* "httptools/parser/parser.pyx":166 + * char* err_pos + * + * if PyMemoryView_Check(data): # <<<<<<<<<<<<<< + * buf = PyMemoryView_GET_BUFFER(data) + * data_len = buf.len + */ + goto __pyx_L3; + } + + /* "httptools/parser/parser.pyx":175 + * + * else: + * buf = &self.py_buf # <<<<<<<<<<<<<< + * PyObject_GetBuffer(data, buf, PyBUF_SIMPLE) + * owning_buf = True + */ + /*else*/ { + __pyx_v_buf = (&__pyx_v_self->py_buf); + + /* "httptools/parser/parser.pyx":176 + * else: + * buf = &self.py_buf + * PyObject_GetBuffer(data, buf, PyBUF_SIMPLE) # <<<<<<<<<<<<<< + * owning_buf = True + * data_len = buf.len + */ + __pyx_t_2 = PyObject_GetBuffer(__pyx_v_data, __pyx_v_buf, PyBUF_SIMPLE); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 176, __pyx_L1_error) + + /* "httptools/parser/parser.pyx":177 + * buf = &self.py_buf + * PyObject_GetBuffer(data, buf, PyBUF_SIMPLE) + * owning_buf = True # <<<<<<<<<<<<<< + * data_len = buf.len + * + */ + __pyx_v_owning_buf = 1; + + /* "httptools/parser/parser.pyx":178 + * PyObject_GetBuffer(data, buf, PyBUF_SIMPLE) + * owning_buf = True + * data_len = buf.len # <<<<<<<<<<<<<< + * + * err = cparser.llhttp_execute( + */ + __pyx_v_data_len = ((size_t)__pyx_v_buf->len); + + /* "httptools/parser/parser.pyx":180 + * data_len = buf.len + * + * err = cparser.llhttp_execute( # <<<<<<<<<<<<<< + * self._cparser, + * buf.buf, + */ + __pyx_v_err = llhttp_execute(__pyx_v_self->_cparser, ((char *)__pyx_v_buf->buf), __pyx_v_data_len); + } + __pyx_L3:; + + /* "httptools/parser/parser.pyx":185 + * data_len) + * + * try: # <<<<<<<<<<<<<< + * if self._cparser.upgrade == 1 and err == cparser.HPE_PAUSED_UPGRADE: + * err_pos = cparser.llhttp_get_error_pos(self._cparser) + */ + /*try:*/ { + + /* "httptools/parser/parser.pyx":186 + * + * try: + * if self._cparser.upgrade == 1 and err == cparser.HPE_PAUSED_UPGRADE: # <<<<<<<<<<<<<< + * err_pos = cparser.llhttp_get_error_pos(self._cparser) + * + */ + __pyx_t_3 = ((__pyx_v_self->_cparser->upgrade == 1) != 0); + if (__pyx_t_3) { + } else { + __pyx_t_1 = __pyx_t_3; + goto __pyx_L8_bool_binop_done; + } + __pyx_t_3 = ((__pyx_v_err == HPE_PAUSED_UPGRADE) != 0); + __pyx_t_1 = __pyx_t_3; + __pyx_L8_bool_binop_done:; + if (unlikely(__pyx_t_1)) { + + /* "httptools/parser/parser.pyx":187 + * try: + * if self._cparser.upgrade == 1 and err == cparser.HPE_PAUSED_UPGRADE: + * err_pos = cparser.llhttp_get_error_pos(self._cparser) # <<<<<<<<<<<<<< + * + * # Immediately free the parser from "error" state, simulating + */ + __pyx_v_err_pos = llhttp_get_error_pos(__pyx_v_self->_cparser); + + /* "httptools/parser/parser.pyx":193 + * # allow users manually "resume after upgrade", and 2) the use + * # case for resuming parsing is very rare. + * cparser.llhttp_resume_after_upgrade(self._cparser) # <<<<<<<<<<<<<< + * + * # The err_pos here is specific for the input buf. 
So if we ever + */ + llhttp_resume_after_upgrade(__pyx_v_self->_cparser); + + /* "httptools/parser/parser.pyx":199 + * # successive calls to feed_data() until resume_after_upgrade is + * # called), we have to store the result and keep our own state. + * raise HttpParserUpgrade(err_pos - buf.buf) # <<<<<<<<<<<<<< + * finally: + * if owning_buf: + */ + __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_HttpParserUpgrade); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 199, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyInt_From_ptrdiff_t((__pyx_v_err_pos - ((char *)__pyx_v_buf->buf))); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 199, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + } + } + __pyx_t_4 = (__pyx_t_7) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_7, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 199, __pyx_L5_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_Raise(__pyx_t_4, 0, 0, 0); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __PYX_ERR(0, 199, __pyx_L5_error) + + /* "httptools/parser/parser.pyx":186 + * + * try: + * if self._cparser.upgrade == 1 and err == cparser.HPE_PAUSED_UPGRADE: # <<<<<<<<<<<<<< + * err_pos = cparser.llhttp_get_error_pos(self._cparser) + * + */ + } + } + + /* "httptools/parser/parser.pyx":201 + * raise HttpParserUpgrade(err_pos - buf.buf) + * finally: + * if owning_buf: # <<<<<<<<<<<<<< + * PyBuffer_Release(buf) + * + */ + /*finally:*/ { + /*normal exit:*/{ + __pyx_t_1 = (__pyx_v_owning_buf != 0); + if (__pyx_t_1) { + + /* "httptools/parser/parser.pyx":202 + * finally: + * if owning_buf: + * PyBuffer_Release(buf) # <<<<<<<<<<<<<< + * + * if err != cparser.HPE_OK: + */ + PyBuffer_Release(__pyx_v_buf); + + /* "httptools/parser/parser.pyx":201 + * raise HttpParserUpgrade(err_pos - buf.buf) + * finally: + * if owning_buf: # <<<<<<<<<<<<<< + * PyBuffer_Release(buf) + * + */ + } + goto __pyx_L6; + } + __pyx_L5_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_13, &__pyx_t_14, &__pyx_t_15); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12) < 0)) __Pyx_ErrFetch(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_15); + __pyx_t_2 = __pyx_lineno; __pyx_t_8 = __pyx_clineno; __pyx_t_9 = __pyx_filename; + { + __pyx_t_1 = (__pyx_v_owning_buf != 0); + if (__pyx_t_1) { + + /* "httptools/parser/parser.pyx":202 + * finally: + * if owning_buf: + * PyBuffer_Release(buf) # <<<<<<<<<<<<<< + * + * if err != cparser.HPE_OK: + */ + PyBuffer_Release(__pyx_v_buf); + + /* "httptools/parser/parser.pyx":201 + * raise HttpParserUpgrade(err_pos - 
buf.buf) + * finally: + * if owning_buf: # <<<<<<<<<<<<<< + * PyBuffer_Release(buf) + * + */ + } + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_ExceptionReset(__pyx_t_13, __pyx_t_14, __pyx_t_15); + } + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_ErrRestore(__pyx_t_10, __pyx_t_11, __pyx_t_12); + __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; + __pyx_lineno = __pyx_t_2; __pyx_clineno = __pyx_t_8; __pyx_filename = __pyx_t_9; + goto __pyx_L1_error; + } + __pyx_L6:; + } + + /* "httptools/parser/parser.pyx":204 + * PyBuffer_Release(buf) + * + * if err != cparser.HPE_OK: # <<<<<<<<<<<<<< + * ex = parser_error_from_errno( + * self._cparser, + */ + __pyx_t_1 = ((__pyx_v_err != HPE_OK) != 0); + if (__pyx_t_1) { + + /* "httptools/parser/parser.pyx":205 + * + * if err != cparser.HPE_OK: + * ex = parser_error_from_errno( # <<<<<<<<<<<<<< + * self._cparser, + * self._cparser.error) + */ + __pyx_t_4 = __pyx_f_9httptools_6parser_6parser_parser_error_from_errno(__pyx_v_self->_cparser, ((llhttp_errno_t)__pyx_v_self->_cparser->error)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 205, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_v_ex = __pyx_t_4; + __pyx_t_4 = 0; + + /* "httptools/parser/parser.pyx":208 + * self._cparser, + * self._cparser.error) + * if isinstance(ex, HttpParserCallbackError): # <<<<<<<<<<<<<< + * if self._last_error is not None: + * ex.__context__ = self._last_error + */ + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_HttpParserCallbackError); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 208, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyObject_IsInstance(__pyx_v_ex, __pyx_t_4); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 208, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_3 = (__pyx_t_1 != 0); + if (__pyx_t_3) { + + /* "httptools/parser/parser.pyx":209 + * self._cparser.error) + * if isinstance(ex, HttpParserCallbackError): + * if self._last_error is not None: # <<<<<<<<<<<<<< + * ex.__context__ = self._last_error + * self._last_error = None + */ + __pyx_t_3 = (__pyx_v_self->_last_error != Py_None); + __pyx_t_1 = (__pyx_t_3 != 0); + if (__pyx_t_1) { + + /* "httptools/parser/parser.pyx":210 + * if isinstance(ex, HttpParserCallbackError): + * if self._last_error is not None: + * ex.__context__ = self._last_error # <<<<<<<<<<<<<< + * self._last_error = None + * raise ex + */ + __pyx_t_4 = __pyx_v_self->_last_error; + __Pyx_INCREF(__pyx_t_4); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_ex, __pyx_n_s_context, __pyx_t_4) < 0) __PYX_ERR(0, 210, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "httptools/parser/parser.pyx":211 + * if self._last_error is not None: + * ex.__context__ = self._last_error + * self._last_error = None # <<<<<<<<<<<<<< + * raise ex + * + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_last_error); + __Pyx_DECREF(__pyx_v_self->_last_error); + __pyx_v_self->_last_error = Py_None; + + /* "httptools/parser/parser.pyx":209 + * self._cparser.error) + * if isinstance(ex, HttpParserCallbackError): + * if self._last_error is not None: # <<<<<<<<<<<<<< + * ex.__context__ = self._last_error + * self._last_error = None + */ + } + + /* "httptools/parser/parser.pyx":208 + * self._cparser, + * self._cparser.error) + * if isinstance(ex, HttpParserCallbackError): # <<<<<<<<<<<<<< + * if self._last_error is not None: + * 
ex.__context__ = self._last_error + */ + } + + /* "httptools/parser/parser.pyx":212 + * ex.__context__ = self._last_error + * self._last_error = None + * raise ex # <<<<<<<<<<<<<< + * + * + */ + __Pyx_Raise(__pyx_v_ex, 0, 0, 0); + __PYX_ERR(0, 212, __pyx_L1_error) + + /* "httptools/parser/parser.pyx":204 + * PyBuffer_Release(buf) + * + * if err != cparser.HPE_OK: # <<<<<<<<<<<<<< + * ex = parser_error_from_errno( + * self._cparser, + */ + } + + /* "httptools/parser/parser.pyx":158 + * return bool(parser.upgrade) + * + * def feed_data(self, data): # <<<<<<<<<<<<<< + * cdef: + * size_t data_len + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("httptools.parser.parser.HttpParser.feed_data", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_13__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_13__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_6parser_10HttpParser_12__reduce_cython__(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_12__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":2 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 2, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("httptools.parser.parser.HttpParser.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial 
__cinit__") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_15__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_6parser_10HttpParser_15__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_6parser_10HttpParser_14__setstate_cython__(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_6parser_10HttpParser_14__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":4 + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 4, __pyx_L1_error) + + /* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("httptools.parser.parser.HttpParser.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":217 + * cdef class HttpRequestParser(HttpParser): + * + * def __init__(self, protocol): # <<<<<<<<<<<<<< + * self._init(protocol, cparser.HTTP_REQUEST) + * + */ + +/* Python wrapper */ +static int __pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_protocol = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_protocol,0}; + PyObject* values[1] = {0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + 
kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_protocol)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 217, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + } + __pyx_v_protocol = values[0]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 217, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("httptools.parser.parser.HttpRequestParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_9httptools_6parser_6parser_17HttpRequestParser___init__(((struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *)__pyx_v_self), __pyx_v_protocol); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9httptools_6parser_6parser_17HttpRequestParser___init__(struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self, PyObject *__pyx_v_protocol) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__init__", 0); + + /* "httptools/parser/parser.pyx":218 + * + * def __init__(self, protocol): + * self._init(protocol, cparser.HTTP_REQUEST) # <<<<<<<<<<<<<< + * + * self._proto_on_url = getattr(protocol, 'on_url', None) + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpRequestParser *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._init(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self), __pyx_v_protocol, HTTP_REQUEST); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 218, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":220 + * self._init(protocol, cparser.HTTP_REQUEST) + * + * self._proto_on_url = getattr(protocol, 'on_url', None) # <<<<<<<<<<<<<< + * if self._proto_on_url is not None: + * self._csettings.on_url = cb_on_url + */ + __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_url, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 220, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->__pyx_base._proto_on_url); + __Pyx_DECREF(__pyx_v_self->__pyx_base._proto_on_url); + __pyx_v_self->__pyx_base._proto_on_url = __pyx_t_1; + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":221 + * + * self._proto_on_url = getattr(protocol, 'on_url', None) + * if self._proto_on_url is not None: # <<<<<<<<<<<<<< + * self._csettings.on_url = cb_on_url + * + */ + __pyx_t_2 = (__pyx_v_self->__pyx_base._proto_on_url != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "httptools/parser/parser.pyx":222 + * self._proto_on_url = getattr(protocol, 'on_url', None) + * if self._proto_on_url is not None: + * self._csettings.on_url = cb_on_url # <<<<<<<<<<<<<< + * + * def get_method(self): + */ + __pyx_v_self->__pyx_base._csettings->on_url = __pyx_f_9httptools_6parser_6parser_cb_on_url; + + /* 
"httptools/parser/parser.pyx":221 + * + * self._proto_on_url = getattr(protocol, 'on_url', None) + * if self._proto_on_url is not None: # <<<<<<<<<<<<<< + * self._csettings.on_url = cb_on_url + * + */ + } + + /* "httptools/parser/parser.pyx":217 + * cdef class HttpRequestParser(HttpParser): + * + * def __init__(self, protocol): # <<<<<<<<<<<<<< + * self._init(protocol, cparser.HTTP_REQUEST) + * + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("httptools.parser.parser.HttpRequestParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":224 + * self._csettings.on_url = cb_on_url + * + * def get_method(self): # <<<<<<<<<<<<<< + * cdef cparser.llhttp_t* parser = self._cparser + * return cparser.llhttp_method_name( parser.method) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_3get_method(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_3get_method(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_method (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_2get_method(((struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_2get_method(struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self) { + llhttp_t *__pyx_v_parser; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + llhttp_t *__pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_method", 0); + + /* "httptools/parser/parser.pyx":225 + * + * def get_method(self): + * cdef cparser.llhttp_t* parser = self._cparser # <<<<<<<<<<<<<< + * return cparser.llhttp_method_name( parser.method) + * + */ + __pyx_t_1 = __pyx_v_self->__pyx_base._cparser; + __pyx_v_parser = __pyx_t_1; + + /* "httptools/parser/parser.pyx":226 + * def get_method(self): + * cdef cparser.llhttp_t* parser = self._cparser + * return cparser.llhttp_method_name( parser.method) # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyBytes_FromString(llhttp_method_name(((llhttp_method_t)__pyx_v_parser->method))); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 226, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "httptools/parser/parser.pyx":224 + * self._csettings.on_url = cb_on_url + * + * def get_method(self): # <<<<<<<<<<<<<< + * cdef cparser.llhttp_t* parser = self._cparser + * return cparser.llhttp_method_name( parser.method) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("httptools.parser.parser.HttpRequestParser.get_method", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, 
__pyx_state): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_4__reduce_cython__(((struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_4__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":2 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 2, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("httptools.parser.parser.HttpRequestParser.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_6__setstate_cython__(((struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_6parser_17HttpRequestParser_6__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int 
__pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":4 + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 4, __pyx_L1_error) + + /* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("httptools.parser.parser.HttpRequestParser.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":231 + * cdef class HttpResponseParser(HttpParser): + * + * def __init__(self, protocol): # <<<<<<<<<<<<<< + * self._init(protocol, cparser.HTTP_RESPONSE) + * + */ + +/* Python wrapper */ +static int __pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_protocol = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_protocol,0}; + PyObject* values[1] = {0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_protocol)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 231, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + } + __pyx_v_protocol = values[0]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 231, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("httptools.parser.parser.HttpResponseParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_9httptools_6parser_6parser_18HttpResponseParser___init__(((struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *)__pyx_v_self), __pyx_v_protocol); + + /* function exit code */ + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9httptools_6parser_6parser_18HttpResponseParser___init__(struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self, PyObject *__pyx_v_protocol) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__init__", 0); + + /* "httptools/parser/parser.pyx":232 + * + * def __init__(self, protocol): + * self._init(protocol, cparser.HTTP_RESPONSE) # <<<<<<<<<<<<<< + * + * self._proto_on_status = getattr(protocol, 'on_status', None) + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpResponseParser *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._init(((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_v_self), __pyx_v_protocol, HTTP_RESPONSE); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 232, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":234 + * self._init(protocol, cparser.HTTP_RESPONSE) + * + * self._proto_on_status = getattr(protocol, 'on_status', None) # <<<<<<<<<<<<<< + * if self._proto_on_status is not None: + * self._csettings.on_status = cb_on_status + */ + __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_protocol, __pyx_n_u_on_status, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 234, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->__pyx_base._proto_on_status); + __Pyx_DECREF(__pyx_v_self->__pyx_base._proto_on_status); + __pyx_v_self->__pyx_base._proto_on_status = __pyx_t_1; + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":235 + * + * self._proto_on_status = getattr(protocol, 'on_status', None) + * if self._proto_on_status is not None: # <<<<<<<<<<<<<< + * self._csettings.on_status = cb_on_status + * + */ + __pyx_t_2 = (__pyx_v_self->__pyx_base._proto_on_status != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "httptools/parser/parser.pyx":236 + * self._proto_on_status = getattr(protocol, 'on_status', None) + * if self._proto_on_status is not None: + * self._csettings.on_status = cb_on_status # <<<<<<<<<<<<<< + * + * def get_status_code(self): + */ + __pyx_v_self->__pyx_base._csettings->on_status = __pyx_f_9httptools_6parser_6parser_cb_on_status; + + /* "httptools/parser/parser.pyx":235 + * + * self._proto_on_status = getattr(protocol, 'on_status', None) + * if self._proto_on_status is not None: # <<<<<<<<<<<<<< + * self._csettings.on_status = cb_on_status + * + */ + } + + /* "httptools/parser/parser.pyx":231 + * cdef class HttpResponseParser(HttpParser): + * + * def __init__(self, protocol): # <<<<<<<<<<<<<< + * self._init(protocol, cparser.HTTP_RESPONSE) + * + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("httptools.parser.parser.HttpResponseParser.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":238 + * self._csettings.on_status = cb_on_status + * + * def get_status_code(self): # <<<<<<<<<<<<<< + * cdef cparser.llhttp_t* parser = self._cparser + * return parser.status_code + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_3get_status_code(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject 
*unused); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_3get_status_code(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_status_code (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_2get_status_code(((struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_2get_status_code(struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self) { + llhttp_t *__pyx_v_parser; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + llhttp_t *__pyx_t_1; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("get_status_code", 0); + + /* "httptools/parser/parser.pyx":239 + * + * def get_status_code(self): + * cdef cparser.llhttp_t* parser = self._cparser # <<<<<<<<<<<<<< + * return parser.status_code + * + */ + __pyx_t_1 = __pyx_v_self->__pyx_base._cparser; + __pyx_v_parser = __pyx_t_1; + + /* "httptools/parser/parser.pyx":240 + * def get_status_code(self): + * cdef cparser.llhttp_t* parser = self._cparser + * return parser.status_code # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyInt_From_uint16_t(__pyx_v_parser->status_code); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 240, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "httptools/parser/parser.pyx":238 + * self._csettings.on_status = cb_on_status + * + * def get_status_code(self): # <<<<<<<<<<<<<< + * cdef cparser.llhttp_t* parser = self._cparser + * return parser.status_code + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("httptools.parser.parser.HttpResponseParser.get_status_code", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_4__reduce_cython__(((struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_4__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree 
fragment)":2 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 2, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("httptools.parser.parser.HttpResponseParser.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_6__setstate_cython__(((struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_6parser_18HttpResponseParser_6__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":4 + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(1, 4, __pyx_L1_error) + + /* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("httptools.parser.parser.HttpResponseParser.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); 
+ __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":243 + * + * + * cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + +static int __pyx_f_9httptools_6parser_6parser_cb_on_message_begin(llhttp_t *__pyx_v_parser) { + struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_ex = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("cb_on_message_begin", 0); + + /* "httptools/parser/parser.pyx":244 + * + * cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * pyparser._proto_on_message_begin() + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":245 + * cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_message_begin() + * except BaseException as ex: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "httptools/parser/parser.pyx":246 + * cdef HttpParser pyparser = parser.data + * try: + * pyparser._proto_on_message_begin() # <<<<<<<<<<<<<< + * except BaseException as ex: + * pyparser._last_error = ex + */ + __Pyx_INCREF(__pyx_v_pyparser->_proto_on_message_begin); + __pyx_t_5 = __pyx_v_pyparser->_proto_on_message_begin; __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + } + } + __pyx_t_1 = (__pyx_t_6) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_6) : __Pyx_PyObject_CallNoArg(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 246, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":245 + * cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_message_begin() + * except BaseException as ex: + */ + } + + /* "httptools/parser/parser.pyx":251 + * return -1 + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "httptools/parser/parser.pyx":247 + * try: + * pyparser._proto_on_message_begin() + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + __pyx_t_7 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_7) { + __Pyx_AddTraceback("httptools.parser.parser.cb_on_message_begin", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_5, &__pyx_t_6) < 0) __PYX_ERR(0, 247, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_INCREF(__pyx_t_5); + __pyx_v_ex = __pyx_t_5; + /*try:*/ { + + /* "httptools/parser/parser.pyx":248 + * pyparser._proto_on_message_begin() + * except BaseException as ex: + * pyparser._last_error = ex # <<<<<<<<<<<<<< + * return -1 + * else: + */ + __Pyx_INCREF(__pyx_v_ex); + __Pyx_GIVEREF(__pyx_v_ex); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_ex; + + /* "httptools/parser/parser.pyx":249 + * except BaseException as ex: + * pyparser._last_error = ex + * return -1 # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L13_return; + } + + /* "httptools/parser/parser.pyx":247 + * try: + * pyparser._proto_on_message_begin() + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + /*finally:*/ { + __pyx_L13_return: { + __pyx_t_7 = __pyx_r; + __Pyx_DECREF(__pyx_v_ex); + __pyx_v_ex = NULL; + __pyx_r = __pyx_t_7; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "httptools/parser/parser.pyx":245 + * cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_message_begin() + * except BaseException as ex: + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "httptools/parser/parser.pyx":243 + * + * + * cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + 
__Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("httptools.parser.parser.cb_on_message_begin", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":254 + * + * + * cdef int cb_on_url(cparser.llhttp_t* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + +static int __pyx_f_9httptools_6parser_6parser_cb_on_url(llhttp_t *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { + struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_ex = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("cb_on_url", 0); + + /* "httptools/parser/parser.pyx":256 + * cdef int cb_on_url(cparser.llhttp_t* parser, + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * pyparser._proto_on_url(at[:length]) + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":257 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_url(at[:length]) + * except BaseException as ex: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "httptools/parser/parser.pyx":258 + * cdef HttpParser pyparser = parser.data + * try: + * pyparser._proto_on_url(at[:length]) # <<<<<<<<<<<<<< + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_url` callback error") + */ + __pyx_t_5 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 258, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_pyparser->_proto_on_url); + __pyx_t_6 = __pyx_v_pyparser->_proto_on_url; __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + __pyx_t_1 = (__pyx_t_7) ? 
__Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_7, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 258, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":257 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_url(at[:length]) + * except BaseException as ex: + */ + } + + /* "httptools/parser/parser.pyx":264 + * return cparser.HPE_USER + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "httptools/parser/parser.pyx":259 + * try: + * pyparser._proto_on_url(at[:length]) + * except BaseException as ex: # <<<<<<<<<<<<<< + * cparser.llhttp_set_error_reason(parser, "`on_url` callback error") + * pyparser._last_error = ex + */ + __pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_8) { + __Pyx_AddTraceback("httptools.parser.parser.cb_on_url", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_5) < 0) __PYX_ERR(0, 259, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __pyx_v_ex = __pyx_t_6; + /*try:*/ { + + /* "httptools/parser/parser.pyx":260 + * pyparser._proto_on_url(at[:length]) + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_url` callback error") # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return cparser.HPE_USER + */ + llhttp_set_error_reason(__pyx_v_parser, ((char const *)"`on_url` callback error")); + + /* "httptools/parser/parser.pyx":261 + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_url` callback error") + * pyparser._last_error = ex # <<<<<<<<<<<<<< + * return cparser.HPE_USER + * else: + */ + __Pyx_INCREF(__pyx_v_ex); + __Pyx_GIVEREF(__pyx_v_ex); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_ex; + + /* "httptools/parser/parser.pyx":262 + * cparser.llhttp_set_error_reason(parser, "`on_url` callback error") + * pyparser._last_error = ex + * return cparser.HPE_USER # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = HPE_USER; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L13_return; + } + + /* "httptools/parser/parser.pyx":259 + * try: + * pyparser._proto_on_url(at[:length]) + * except BaseException as ex: # <<<<<<<<<<<<<< + * cparser.llhttp_set_error_reason(parser, "`on_url` callback error") + * pyparser._last_error = ex + */ + /*finally:*/ { + __pyx_L13_return: { + __pyx_t_8 = __pyx_r; + __Pyx_DECREF(__pyx_v_ex); + __pyx_v_ex = NULL; + __pyx_r = __pyx_t_8; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "httptools/parser/parser.pyx":257 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_url(at[:length]) + * except BaseException as ex: 
+ */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "httptools/parser/parser.pyx":254 + * + * + * cdef int cb_on_url(cparser.llhttp_t* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("httptools.parser.parser.cb_on_url", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":267 + * + * + * cdef int cb_on_status(cparser.llhttp_t* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + +static int __pyx_f_9httptools_6parser_6parser_cb_on_status(llhttp_t *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { + struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_ex = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("cb_on_status", 0); + + /* "httptools/parser/parser.pyx":269 + * cdef int cb_on_status(cparser.llhttp_t* parser, + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * pyparser._proto_on_status(at[:length]) + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":270 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_status(at[:length]) + * except BaseException as ex: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "httptools/parser/parser.pyx":271 + * cdef HttpParser pyparser = parser.data + * try: + * pyparser._proto_on_status(at[:length]) # <<<<<<<<<<<<<< + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_status` callback error") + */ + __pyx_t_5 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 271, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_pyparser->_proto_on_status); + __pyx_t_6 = __pyx_v_pyparser->_proto_on_status; __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + 
__Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + __pyx_t_1 = (__pyx_t_7) ? __Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_7, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 271, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":270 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_status(at[:length]) + * except BaseException as ex: + */ + } + + /* "httptools/parser/parser.pyx":277 + * return cparser.HPE_USER + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "httptools/parser/parser.pyx":272 + * try: + * pyparser._proto_on_status(at[:length]) + * except BaseException as ex: # <<<<<<<<<<<<<< + * cparser.llhttp_set_error_reason(parser, "`on_status` callback error") + * pyparser._last_error = ex + */ + __pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_8) { + __Pyx_AddTraceback("httptools.parser.parser.cb_on_status", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_5) < 0) __PYX_ERR(0, 272, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __pyx_v_ex = __pyx_t_6; + /*try:*/ { + + /* "httptools/parser/parser.pyx":273 + * pyparser._proto_on_status(at[:length]) + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_status` callback error") # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return cparser.HPE_USER + */ + llhttp_set_error_reason(__pyx_v_parser, ((char const *)"`on_status` callback error")); + + /* "httptools/parser/parser.pyx":274 + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_status` callback error") + * pyparser._last_error = ex # <<<<<<<<<<<<<< + * return cparser.HPE_USER + * else: + */ + __Pyx_INCREF(__pyx_v_ex); + __Pyx_GIVEREF(__pyx_v_ex); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_ex; + + /* "httptools/parser/parser.pyx":275 + * cparser.llhttp_set_error_reason(parser, "`on_status` callback error") + * pyparser._last_error = ex + * return cparser.HPE_USER # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = HPE_USER; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L13_return; + } + + /* "httptools/parser/parser.pyx":272 + * try: + * pyparser._proto_on_status(at[:length]) + * except BaseException as ex: # <<<<<<<<<<<<<< + * cparser.llhttp_set_error_reason(parser, "`on_status` callback error") + * pyparser._last_error = ex + */ + /*finally:*/ { + __pyx_L13_return: { + __pyx_t_8 = __pyx_r; + __Pyx_DECREF(__pyx_v_ex); + __pyx_v_ex = NULL; + __pyx_r = __pyx_t_8; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "httptools/parser/parser.pyx":270 + * const char *at, size_t length) except -1: + * 
cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_status(at[:length]) + * except BaseException as ex: + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "httptools/parser/parser.pyx":267 + * + * + * cdef int cb_on_status(cparser.llhttp_t* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("httptools.parser.parser.cb_on_status", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":280 + * + * + * cdef int cb_on_header_field(cparser.llhttp_t* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + +static int __pyx_f_9httptools_6parser_6parser_cb_on_header_field(llhttp_t *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { + struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_ex = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("cb_on_header_field", 0); + + /* "httptools/parser/parser.pyx":282 + * cdef int cb_on_header_field(cparser.llhttp_t* parser, + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * pyparser._on_header_field(at[:length]) + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":283 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_header_field(at[:length]) + * except BaseException as ex: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "httptools/parser/parser.pyx":284 + * cdef HttpParser pyparser = parser.data + * try: + * pyparser._on_header_field(at[:length]) # <<<<<<<<<<<<<< + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_header_field` callback error") + */ + __pyx_t_1 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 284, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_header_field(__pyx_v_pyparser, ((PyObject*)__pyx_t_1)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 284, 
__pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "httptools/parser/parser.pyx":283 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_header_field(at[:length]) + * except BaseException as ex: + */ + } + + /* "httptools/parser/parser.pyx":290 + * return cparser.HPE_USER + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "httptools/parser/parser.pyx":285 + * try: + * pyparser._on_header_field(at[:length]) + * except BaseException as ex: # <<<<<<<<<<<<<< + * cparser.llhttp_set_error_reason(parser, "`on_header_field` callback error") + * pyparser._last_error = ex + */ + __pyx_t_6 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_6) { + __Pyx_AddTraceback("httptools.parser.parser.cb_on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_1, &__pyx_t_7) < 0) __PYX_ERR(0, 285, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_7); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_ex = __pyx_t_1; + /*try:*/ { + + /* "httptools/parser/parser.pyx":286 + * pyparser._on_header_field(at[:length]) + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_header_field` callback error") # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return cparser.HPE_USER + */ + llhttp_set_error_reason(__pyx_v_parser, ((char const *)"`on_header_field` callback error")); + + /* "httptools/parser/parser.pyx":287 + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_header_field` callback error") + * pyparser._last_error = ex # <<<<<<<<<<<<<< + * return cparser.HPE_USER + * else: + */ + __Pyx_INCREF(__pyx_v_ex); + __Pyx_GIVEREF(__pyx_v_ex); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_ex; + + /* "httptools/parser/parser.pyx":288 + * cparser.llhttp_set_error_reason(parser, "`on_header_field` callback error") + * pyparser._last_error = ex + * return cparser.HPE_USER # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = HPE_USER; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L13_return; + } + + /* "httptools/parser/parser.pyx":285 + * try: + * pyparser._on_header_field(at[:length]) + * except BaseException as ex: # <<<<<<<<<<<<<< + * cparser.llhttp_set_error_reason(parser, "`on_header_field` callback error") + * pyparser._last_error = ex + */ + /*finally:*/ { + __pyx_L13_return: { + __pyx_t_6 = __pyx_r; + __Pyx_DECREF(__pyx_v_ex); + __pyx_v_ex = NULL; + __pyx_r = __pyx_t_6; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "httptools/parser/parser.pyx":283 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_header_field(at[:length]) + * except BaseException as ex: + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + 
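+ /* Give the three saved exception-state references (type, value,
+ * traceback) back to the thread state; __Pyx_ExceptionReset below
+ * restores whatever exception was live when the `try` was entered. */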
__Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "httptools/parser/parser.pyx":280 + * + * + * cdef int cb_on_header_field(cparser.llhttp_t* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("httptools.parser.parser.cb_on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":293 + * + * + * cdef int cb_on_header_value(cparser.llhttp_t* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + +static int __pyx_f_9httptools_6parser_6parser_cb_on_header_value(llhttp_t *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { + struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_ex = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("cb_on_header_value", 0); + + /* "httptools/parser/parser.pyx":295 + * cdef int cb_on_header_value(cparser.llhttp_t* parser, + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * pyparser._on_header_value(at[:length]) + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":296 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_header_value(at[:length]) + * except BaseException as ex: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "httptools/parser/parser.pyx":297 + * cdef HttpParser pyparser = parser.data + * try: + * pyparser._on_header_value(at[:length]) # <<<<<<<<<<<<<< + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_header_value` callback error") + */ + __pyx_t_1 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 297, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_header_value(__pyx_v_pyparser, ((PyObject*)__pyx_t_1)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 297, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "httptools/parser/parser.pyx":296 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_header_value(at[:length]) + * except BaseException as ex: + */ + } + + /* 
"httptools/parser/parser.pyx":303 + * return cparser.HPE_USER + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "httptools/parser/parser.pyx":298 + * try: + * pyparser._on_header_value(at[:length]) + * except BaseException as ex: # <<<<<<<<<<<<<< + * cparser.llhttp_set_error_reason(parser, "`on_header_value` callback error") + * pyparser._last_error = ex + */ + __pyx_t_6 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_6) { + __Pyx_AddTraceback("httptools.parser.parser.cb_on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_1, &__pyx_t_7) < 0) __PYX_ERR(0, 298, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_7); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_ex = __pyx_t_1; + /*try:*/ { + + /* "httptools/parser/parser.pyx":299 + * pyparser._on_header_value(at[:length]) + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_header_value` callback error") # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return cparser.HPE_USER + */ + llhttp_set_error_reason(__pyx_v_parser, ((char const *)"`on_header_value` callback error")); + + /* "httptools/parser/parser.pyx":300 + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_header_value` callback error") + * pyparser._last_error = ex # <<<<<<<<<<<<<< + * return cparser.HPE_USER + * else: + */ + __Pyx_INCREF(__pyx_v_ex); + __Pyx_GIVEREF(__pyx_v_ex); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_ex; + + /* "httptools/parser/parser.pyx":301 + * cparser.llhttp_set_error_reason(parser, "`on_header_value` callback error") + * pyparser._last_error = ex + * return cparser.HPE_USER # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = HPE_USER; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L13_return; + } + + /* "httptools/parser/parser.pyx":298 + * try: + * pyparser._on_header_value(at[:length]) + * except BaseException as ex: # <<<<<<<<<<<<<< + * cparser.llhttp_set_error_reason(parser, "`on_header_value` callback error") + * pyparser._last_error = ex + */ + /*finally:*/ { + __pyx_L13_return: { + __pyx_t_6 = __pyx_r; + __Pyx_DECREF(__pyx_v_ex); + __pyx_v_ex = NULL; + __pyx_r = __pyx_t_6; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "httptools/parser/parser.pyx":296 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_header_value(at[:length]) + * except BaseException as ex: + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "httptools/parser/parser.pyx":293 + * + * + * cdef int cb_on_header_value(cparser.llhttp_t* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + + /* function exit code */ + 
__pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("httptools.parser.parser.cb_on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":306 + * + * + * cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + +static int __pyx_f_9httptools_6parser_6parser_cb_on_headers_complete(llhttp_t *__pyx_v_parser) { + struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_ex = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("cb_on_headers_complete", 0); + + /* "httptools/parser/parser.pyx":307 + * + * cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * pyparser._on_headers_complete() + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":308 + * cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_headers_complete() + * except BaseException as ex: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "httptools/parser/parser.pyx":309 + * cdef HttpParser pyparser = parser.data + * try: + * pyparser._on_headers_complete() # <<<<<<<<<<<<<< + * except BaseException as ex: + * pyparser._last_error = ex + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_headers_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 309, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":308 + * cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_headers_complete() + * except BaseException as ex: + */ + } + + /* "httptools/parser/parser.pyx":314 + * return -1 + * else: + * if pyparser._cparser.upgrade: # <<<<<<<<<<<<<< + * return 1 + * else: + */ + /*else:*/ { + __pyx_t_5 = (__pyx_v_pyparser->_cparser->upgrade != 0); + if (__pyx_t_5) { + + /* "httptools/parser/parser.pyx":315 + * else: + * if pyparser._cparser.upgrade: + * return 1 # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = 1; + goto __pyx_L6_except_return; + + /* "httptools/parser/parser.pyx":314 + * return -1 + * else: + * if pyparser._cparser.upgrade: # <<<<<<<<<<<<<< + * return 1 + * else: + */ + } + + /* "httptools/parser/parser.pyx":317 + * return 1 + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else*/ { + __pyx_r = 0; + goto 
__pyx_L6_except_return; + } + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":310 + * try: + * pyparser._on_headers_complete() + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + __pyx_t_6 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_6) { + __Pyx_AddTraceback("httptools.parser.parser.cb_on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_7, &__pyx_t_8) < 0) __PYX_ERR(0, 310, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GOTREF(__pyx_t_8); + __Pyx_INCREF(__pyx_t_7); + __pyx_v_ex = __pyx_t_7; + /*try:*/ { + + /* "httptools/parser/parser.pyx":311 + * pyparser._on_headers_complete() + * except BaseException as ex: + * pyparser._last_error = ex # <<<<<<<<<<<<<< + * return -1 + * else: + */ + __Pyx_INCREF(__pyx_v_ex); + __Pyx_GIVEREF(__pyx_v_ex); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_ex; + + /* "httptools/parser/parser.pyx":312 + * except BaseException as ex: + * pyparser._last_error = ex + * return -1 # <<<<<<<<<<<<<< + * else: + * if pyparser._cparser.upgrade: + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L14_return; + } + + /* "httptools/parser/parser.pyx":310 + * try: + * pyparser._on_headers_complete() + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + /*finally:*/ { + __pyx_L14_return: { + __pyx_t_6 = __pyx_r; + __Pyx_DECREF(__pyx_v_ex); + __pyx_v_ex = NULL; + __pyx_r = __pyx_t_6; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "httptools/parser/parser.pyx":308 + * cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_headers_complete() + * except BaseException as ex: + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "httptools/parser/parser.pyx":306 + * + * + * cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("httptools.parser.parser.cb_on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":320 + * + * + * cdef int cb_on_body(cparser.llhttp_t* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + +static int __pyx_f_9httptools_6parser_6parser_cb_on_body(llhttp_t *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { + struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0; + PyObject 
*__pyx_v_ex = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("cb_on_body", 0); + + /* "httptools/parser/parser.pyx":322 + * cdef int cb_on_body(cparser.llhttp_t* parser, + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * pyparser._proto_on_body(at[:length]) + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":323 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_body(at[:length]) + * except BaseException as ex: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "httptools/parser/parser.pyx":324 + * cdef HttpParser pyparser = parser.data + * try: + * pyparser._proto_on_body(at[:length]) # <<<<<<<<<<<<<< + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_body` callback error") + */ + __pyx_t_5 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 324, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_pyparser->_proto_on_body); + __pyx_t_6 = __pyx_v_pyparser->_proto_on_body; __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + __pyx_t_1 = (__pyx_t_7) ? 
__Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_7, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 324, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":323 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_body(at[:length]) + * except BaseException as ex: + */ + } + + /* "httptools/parser/parser.pyx":330 + * return cparser.HPE_USER + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "httptools/parser/parser.pyx":325 + * try: + * pyparser._proto_on_body(at[:length]) + * except BaseException as ex: # <<<<<<<<<<<<<< + * cparser.llhttp_set_error_reason(parser, "`on_body` callback error") + * pyparser._last_error = ex + */ + __pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_8) { + __Pyx_AddTraceback("httptools.parser.parser.cb_on_body", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_5) < 0) __PYX_ERR(0, 325, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __pyx_v_ex = __pyx_t_6; + /*try:*/ { + + /* "httptools/parser/parser.pyx":326 + * pyparser._proto_on_body(at[:length]) + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_body` callback error") # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return cparser.HPE_USER + */ + llhttp_set_error_reason(__pyx_v_parser, ((char const *)"`on_body` callback error")); + + /* "httptools/parser/parser.pyx":327 + * except BaseException as ex: + * cparser.llhttp_set_error_reason(parser, "`on_body` callback error") + * pyparser._last_error = ex # <<<<<<<<<<<<<< + * return cparser.HPE_USER + * else: + */ + __Pyx_INCREF(__pyx_v_ex); + __Pyx_GIVEREF(__pyx_v_ex); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_ex; + + /* "httptools/parser/parser.pyx":328 + * cparser.llhttp_set_error_reason(parser, "`on_body` callback error") + * pyparser._last_error = ex + * return cparser.HPE_USER # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = HPE_USER; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L13_return; + } + + /* "httptools/parser/parser.pyx":325 + * try: + * pyparser._proto_on_body(at[:length]) + * except BaseException as ex: # <<<<<<<<<<<<<< + * cparser.llhttp_set_error_reason(parser, "`on_body` callback error") + * pyparser._last_error = ex + */ + /*finally:*/ { + __pyx_L13_return: { + __pyx_t_8 = __pyx_r; + __Pyx_DECREF(__pyx_v_ex); + __pyx_v_ex = NULL; + __pyx_r = __pyx_t_8; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "httptools/parser/parser.pyx":323 + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_body(at[:length]) + * except 
BaseException as ex: + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "httptools/parser/parser.pyx":320 + * + * + * cdef int cb_on_body(cparser.llhttp_t* parser, # <<<<<<<<<<<<<< + * const char *at, size_t length) except -1: + * cdef HttpParser pyparser = parser.data + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("httptools.parser.parser.cb_on_body", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":333 + * + * + * cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + +static int __pyx_f_9httptools_6parser_6parser_cb_on_message_complete(llhttp_t *__pyx_v_parser) { + struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_ex = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("cb_on_message_complete", 0); + + /* "httptools/parser/parser.pyx":334 + * + * cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * pyparser._proto_on_message_complete() + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":335 + * cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_message_complete() + * except BaseException as ex: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "httptools/parser/parser.pyx":336 + * cdef HttpParser pyparser = parser.data + * try: + * pyparser._proto_on_message_complete() # <<<<<<<<<<<<<< + * except BaseException as ex: + * pyparser._last_error = ex + */ + __Pyx_INCREF(__pyx_v_pyparser->_proto_on_message_complete); + __pyx_t_5 = __pyx_v_pyparser->_proto_on_message_complete; __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + } + } + __pyx_t_1 = (__pyx_t_6) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_6) : __Pyx_PyObject_CallNoArg(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 336, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":335 + * cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_message_complete() + * except BaseException as ex: + */ + } + + /* "httptools/parser/parser.pyx":341 + * return -1 + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "httptools/parser/parser.pyx":337 + * try: + * pyparser._proto_on_message_complete() + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + __pyx_t_7 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_7) { + __Pyx_AddTraceback("httptools.parser.parser.cb_on_message_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_5, &__pyx_t_6) < 0) __PYX_ERR(0, 337, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_INCREF(__pyx_t_5); + __pyx_v_ex = __pyx_t_5; + /*try:*/ { + + /* "httptools/parser/parser.pyx":338 + * pyparser._proto_on_message_complete() + * except BaseException as ex: + * pyparser._last_error = ex # <<<<<<<<<<<<<< + * return -1 + * else: + */ + __Pyx_INCREF(__pyx_v_ex); + __Pyx_GIVEREF(__pyx_v_ex); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_ex; + + /* "httptools/parser/parser.pyx":339 + * except BaseException as ex: + * pyparser._last_error = ex + * return -1 # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L13_return; + } + + /* "httptools/parser/parser.pyx":337 + * try: + * pyparser._proto_on_message_complete() + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + /*finally:*/ { + __pyx_L13_return: { + __pyx_t_7 = __pyx_r; + __Pyx_DECREF(__pyx_v_ex); + __pyx_v_ex = NULL; + __pyx_r = __pyx_t_7; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "httptools/parser/parser.pyx":335 + * cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._proto_on_message_complete() + * except BaseException as ex: + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "httptools/parser/parser.pyx":333 + * + * + * cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + + /* function exit code */ + __pyx_L1_error:; + 
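+ /* Error path: every still-live temporary is XDECREF'd before
+ * __Pyx_AddTraceback records this C frame in the Python traceback
+ * and the function reports failure with -1. */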
__Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("httptools.parser.parser.cb_on_message_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":344 + * + * + * cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + +static int __pyx_f_9httptools_6parser_6parser_cb_on_chunk_header(llhttp_t *__pyx_v_parser) { + struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_ex = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("cb_on_chunk_header", 0); + + /* "httptools/parser/parser.pyx":345 + * + * cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * pyparser._on_chunk_header() + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":346 + * cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_chunk_header() + * except BaseException as ex: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "httptools/parser/parser.pyx":347 + * cdef HttpParser pyparser = parser.data + * try: + * pyparser._on_chunk_header() # <<<<<<<<<<<<<< + * except BaseException as ex: + * pyparser._last_error = ex + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_chunk_header(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 347, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":346 + * cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_chunk_header() + * except BaseException as ex: + */ + } + + /* "httptools/parser/parser.pyx":352 + * return -1 + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":348 + * try: + * pyparser._on_chunk_header() + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_5) { + __Pyx_AddTraceback("httptools.parser.parser.cb_on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 348, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); 
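+ /* __Pyx_GetException has just moved the live exception into
+ * __pyx_t_1/__pyx_t_6/__pyx_t_7 (type, value, traceback); the extra
+ * INCREF below keeps the value object alive as the Python-level `ex`. */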
+ __Pyx_INCREF(__pyx_t_6); + __pyx_v_ex = __pyx_t_6; + /*try:*/ { + + /* "httptools/parser/parser.pyx":349 + * pyparser._on_chunk_header() + * except BaseException as ex: + * pyparser._last_error = ex # <<<<<<<<<<<<<< + * return -1 + * else: + */ + __Pyx_INCREF(__pyx_v_ex); + __Pyx_GIVEREF(__pyx_v_ex); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_ex; + + /* "httptools/parser/parser.pyx":350 + * except BaseException as ex: + * pyparser._last_error = ex + * return -1 # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L13_return; + } + + /* "httptools/parser/parser.pyx":348 + * try: + * pyparser._on_chunk_header() + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + /*finally:*/ { + __pyx_L13_return: { + __pyx_t_5 = __pyx_r; + __Pyx_DECREF(__pyx_v_ex); + __pyx_v_ex = NULL; + __pyx_r = __pyx_t_5; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "httptools/parser/parser.pyx":346 + * cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_chunk_header() + * except BaseException as ex: + */ + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "httptools/parser/parser.pyx":344 + * + * + * cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("httptools.parser.parser.cb_on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":355 + * + * + * cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + +static int __pyx_f_9httptools_6parser_6parser_cb_on_chunk_complete(llhttp_t *__pyx_v_parser) { + struct __pyx_obj_9httptools_6parser_6parser_HttpParser *__pyx_v_pyparser = 0; + PyObject *__pyx_v_ex = NULL; + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("cb_on_chunk_complete", 0); + + /* "httptools/parser/parser.pyx":356 + * + * cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< + * try: + * pyparser._on_chunk_complete() + */ + __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_pyparser = ((struct 
__pyx_obj_9httptools_6parser_6parser_HttpParser *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":357 + * cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_chunk_complete() + * except BaseException as ex: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "httptools/parser/parser.pyx":358 + * cdef HttpParser pyparser = parser.data + * try: + * pyparser._on_chunk_complete() # <<<<<<<<<<<<<< + * except BaseException as ex: + * pyparser._last_error = ex + */ + __pyx_t_1 = ((struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_chunk_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 358, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":357 + * cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_chunk_complete() + * except BaseException as ex: + */ + } + + /* "httptools/parser/parser.pyx":363 + * return -1 + * else: + * return 0 # <<<<<<<<<<<<<< + * + * + */ + /*else:*/ { + __pyx_r = 0; + goto __pyx_L6_except_return; + } + __pyx_L3_error:; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":359 + * try: + * pyparser._on_chunk_complete() + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); + if (__pyx_t_5) { + __Pyx_AddTraceback("httptools.parser.parser.cb_on_chunk_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 359, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + __Pyx_INCREF(__pyx_t_6); + __pyx_v_ex = __pyx_t_6; + /*try:*/ { + + /* "httptools/parser/parser.pyx":360 + * pyparser._on_chunk_complete() + * except BaseException as ex: + * pyparser._last_error = ex # <<<<<<<<<<<<<< + * return -1 + * else: + */ + __Pyx_INCREF(__pyx_v_ex); + __Pyx_GIVEREF(__pyx_v_ex); + __Pyx_GOTREF(__pyx_v_pyparser->_last_error); + __Pyx_DECREF(__pyx_v_pyparser->_last_error); + __pyx_v_pyparser->_last_error = __pyx_v_ex; + + /* "httptools/parser/parser.pyx":361 + * except BaseException as ex: + * pyparser._last_error = ex + * return -1 # <<<<<<<<<<<<<< + * else: + * return 0 + */ + __pyx_r = -1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L13_return; + } + + /* "httptools/parser/parser.pyx":359 + * try: + * pyparser._on_chunk_complete() + * except BaseException as ex: # <<<<<<<<<<<<<< + * pyparser._last_error = ex + * return -1 + */ + /*finally:*/ { + __pyx_L13_return: { + __pyx_t_5 = __pyx_r; + __Pyx_DECREF(__pyx_v_ex); + __pyx_v_ex = NULL; + __pyx_r = __pyx_t_5; + goto __pyx_L6_except_return; + } + } + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "httptools/parser/parser.pyx":357 + * cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: + * cdef HttpParser pyparser = parser.data + * try: # <<<<<<<<<<<<<< + * pyparser._on_chunk_complete() + * except BaseException as ex: + */ + 
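+ /* An exception escaped the except clause itself: restore the outer
+ * exception state saved on entry to the `try`, then fall through to
+ * the common error exit. */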
__Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L6_except_return:; + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "httptools/parser/parser.pyx":355 + * + * + * cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: # <<<<<<<<<<<<<< + * cdef HttpParser pyparser = parser.data + * try: + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("httptools.parser.parser.cb_on_chunk_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); + __Pyx_XDECREF(__pyx_v_ex); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/parser.pyx":366 + * + * + * cdef parser_error_from_errno(cparser.llhttp_t* parser, cparser.llhttp_errno_t errno): # <<<<<<<<<<<<<< + * cdef bytes reason = cparser.llhttp_get_error_reason(parser) + * + */ + +static PyObject *__pyx_f_9httptools_6parser_6parser_parser_error_from_errno(llhttp_t *__pyx_v_parser, llhttp_errno_t __pyx_v_errno) { + PyObject *__pyx_v_reason = 0; + PyObject *__pyx_v_cls = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("parser_error_from_errno", 0); + + /* "httptools/parser/parser.pyx":367 + * + * cdef parser_error_from_errno(cparser.llhttp_t* parser, cparser.llhttp_errno_t errno): + * cdef bytes reason = cparser.llhttp_get_error_reason(parser) # <<<<<<<<<<<<<< + * + * if errno in (cparser.HPE_CB_MESSAGE_BEGIN, + */ + __pyx_t_1 = __Pyx_PyBytes_FromString(llhttp_get_error_reason(__pyx_v_parser)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 367, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_reason = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":369 + * cdef bytes reason = cparser.llhttp_get_error_reason(parser) + * + * if errno in (cparser.HPE_CB_MESSAGE_BEGIN, # <<<<<<<<<<<<<< + * cparser.HPE_CB_HEADERS_COMPLETE, + * cparser.HPE_CB_MESSAGE_COMPLETE, + */ + switch (__pyx_v_errno) { + case HPE_CB_MESSAGE_BEGIN: + case HPE_CB_HEADERS_COMPLETE: + + /* "httptools/parser/parser.pyx":370 + * + * if errno in (cparser.HPE_CB_MESSAGE_BEGIN, + * cparser.HPE_CB_HEADERS_COMPLETE, # <<<<<<<<<<<<<< + * cparser.HPE_CB_MESSAGE_COMPLETE, + * cparser.HPE_CB_CHUNK_HEADER, + */ + case HPE_CB_MESSAGE_COMPLETE: + + /* "httptools/parser/parser.pyx":371 + * if errno in (cparser.HPE_CB_MESSAGE_BEGIN, + * cparser.HPE_CB_HEADERS_COMPLETE, + * cparser.HPE_CB_MESSAGE_COMPLETE, # <<<<<<<<<<<<<< + * cparser.HPE_CB_CHUNK_HEADER, + * cparser.HPE_CB_CHUNK_COMPLETE, + */ + case HPE_CB_CHUNK_HEADER: + + /* "httptools/parser/parser.pyx":372 + * cparser.HPE_CB_HEADERS_COMPLETE, + * cparser.HPE_CB_MESSAGE_COMPLETE, + * cparser.HPE_CB_CHUNK_HEADER, # <<<<<<<<<<<<<< + * cparser.HPE_CB_CHUNK_COMPLETE, + * cparser.HPE_USER): + */ + case HPE_CB_CHUNK_COMPLETE: + + /* "httptools/parser/parser.pyx":373 + * cparser.HPE_CB_MESSAGE_COMPLETE, + * cparser.HPE_CB_CHUNK_HEADER, + * cparser.HPE_CB_CHUNK_COMPLETE, # <<<<<<<<<<<<<< + * cparser.HPE_USER): + * cls = 
HttpParserCallbackError + */ + case HPE_USER: + + /* "httptools/parser/parser.pyx":375 + * cparser.HPE_CB_CHUNK_COMPLETE, + * cparser.HPE_USER): + * cls = HttpParserCallbackError # <<<<<<<<<<<<<< + * + * elif errno == cparser.HPE_INVALID_STATUS: + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpParserCallbackError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 375, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_cls = __pyx_t_1; + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":369 + * cdef bytes reason = cparser.llhttp_get_error_reason(parser) + * + * if errno in (cparser.HPE_CB_MESSAGE_BEGIN, # <<<<<<<<<<<<<< + * cparser.HPE_CB_HEADERS_COMPLETE, + * cparser.HPE_CB_MESSAGE_COMPLETE, + */ + break; + case HPE_INVALID_STATUS: + + /* "httptools/parser/parser.pyx":378 + * + * elif errno == cparser.HPE_INVALID_STATUS: + * cls = HttpParserInvalidStatusError # <<<<<<<<<<<<<< + * + * elif errno == cparser.HPE_INVALID_METHOD: + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpParserInvalidStatusError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 378, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_cls = __pyx_t_1; + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":377 + * cls = HttpParserCallbackError + * + * elif errno == cparser.HPE_INVALID_STATUS: # <<<<<<<<<<<<<< + * cls = HttpParserInvalidStatusError + * + */ + break; + case HPE_INVALID_METHOD: + + /* "httptools/parser/parser.pyx":381 + * + * elif errno == cparser.HPE_INVALID_METHOD: + * cls = HttpParserInvalidMethodError # <<<<<<<<<<<<<< + * + * elif errno == cparser.HPE_INVALID_URL: + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpParserInvalidMethodError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 381, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_cls = __pyx_t_1; + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":380 + * cls = HttpParserInvalidStatusError + * + * elif errno == cparser.HPE_INVALID_METHOD: # <<<<<<<<<<<<<< + * cls = HttpParserInvalidMethodError + * + */ + break; + case HPE_INVALID_URL: + + /* "httptools/parser/parser.pyx":384 + * + * elif errno == cparser.HPE_INVALID_URL: + * cls = HttpParserInvalidURLError # <<<<<<<<<<<<<< + * + * else: + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpParserInvalidURLError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 384, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_cls = __pyx_t_1; + __pyx_t_1 = 0; + + /* "httptools/parser/parser.pyx":383 + * cls = HttpParserInvalidMethodError + * + * elif errno == cparser.HPE_INVALID_URL: # <<<<<<<<<<<<<< + * cls = HttpParserInvalidURLError + * + */ + break; + default: + + /* "httptools/parser/parser.pyx":387 + * + * else: + * cls = HttpParserError # <<<<<<<<<<<<<< + * + * return cls(reason.decode('latin-1')) + */ + __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_HttpParserError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 387, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_cls = __pyx_t_1; + __pyx_t_1 = 0; + break; + } + + /* "httptools/parser/parser.pyx":389 + * cls = HttpParserError + * + * return cls(reason.decode('latin-1')) # <<<<<<<<<<<<<< + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_decode_bytes(__pyx_v_reason, 0, PY_SSIZE_T_MAX, NULL, NULL, PyUnicode_DecodeLatin1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 389, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v_cls); + __pyx_t_3 = __pyx_v_cls; __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = 
PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + __pyx_t_1 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_t_2) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_2); + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 389, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "httptools/parser/parser.pyx":366 + * + * + * cdef parser_error_from_errno(cparser.llhttp_t* parser, cparser.llhttp_errno_t errno): # <<<<<<<<<<<<<< + * cdef bytes reason = cparser.llhttp_get_error_reason(parser) + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("httptools.parser.parser.parser_error_from_errno", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_reason); + __Pyx_XDECREF(__pyx_v_cls); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} +static struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser __pyx_vtable_9httptools_6parser_6parser_HttpParser; + +static PyObject *__pyx_tp_new_9httptools_6parser_6parser_HttpParser(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_9httptools_6parser_6parser_HttpParser *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)o); + p->__pyx_vtab = __pyx_vtabptr_9httptools_6parser_6parser_HttpParser; + p->_current_header_name = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->_current_header_value = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->_proto_on_url = Py_None; Py_INCREF(Py_None); + p->_proto_on_status = Py_None; Py_INCREF(Py_None); + p->_proto_on_body = Py_None; Py_INCREF(Py_None); + p->_proto_on_header = Py_None; Py_INCREF(Py_None); + p->_proto_on_headers_complete = Py_None; Py_INCREF(Py_None); + p->_proto_on_message_complete = Py_None; Py_INCREF(Py_None); + p->_proto_on_chunk_header = Py_None; Py_INCREF(Py_None); + p->_proto_on_chunk_complete = Py_None; Py_INCREF(Py_None); + p->_proto_on_message_begin = Py_None; Py_INCREF(Py_None); + p->_last_error = Py_None; Py_INCREF(Py_None); + p->py_buf.obj = NULL; + if (unlikely(__pyx_pw_9httptools_6parser_6parser_10HttpParser_1__cinit__(o, __pyx_empty_tuple, NULL) < 0)) goto bad; + return o; + bad: + Py_DECREF(o); o = 0; + return NULL; +} + +static void __pyx_tp_dealloc_9httptools_6parser_6parser_HttpParser(PyObject *o) { + struct __pyx_obj_9httptools_6parser_6parser_HttpParser *p = (struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + { + PyObject *etype, *eval, *etb; + PyErr_Fetch(&etype, &eval, &etb); + __Pyx_SET_REFCNT(o, Py_REFCNT(o) + 1); + __pyx_pw_9httptools_6parser_6parser_10HttpParser_3__dealloc__(o); + __Pyx_SET_REFCNT(o, Py_REFCNT(o) - 1); + PyErr_Restore(etype, eval, etb); + } + 
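+ /* The user-visible __dealloc__ has run (with the refcount temporarily
+ * bumped so code inside it cannot retrigger deallocation); now drop
+ * every owned attribute reference before tp_free returns the memory. */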
Py_CLEAR(p->_current_header_name); + Py_CLEAR(p->_current_header_value); + Py_CLEAR(p->_proto_on_url); + Py_CLEAR(p->_proto_on_status); + Py_CLEAR(p->_proto_on_body); + Py_CLEAR(p->_proto_on_header); + Py_CLEAR(p->_proto_on_headers_complete); + Py_CLEAR(p->_proto_on_message_complete); + Py_CLEAR(p->_proto_on_chunk_header); + Py_CLEAR(p->_proto_on_chunk_complete); + Py_CLEAR(p->_proto_on_message_begin); + Py_CLEAR(p->_last_error); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_9httptools_6parser_6parser_HttpParser(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_9httptools_6parser_6parser_HttpParser *p = (struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)o; + if (p->_proto_on_url) { + e = (*v)(p->_proto_on_url, a); if (e) return e; + } + if (p->_proto_on_status) { + e = (*v)(p->_proto_on_status, a); if (e) return e; + } + if (p->_proto_on_body) { + e = (*v)(p->_proto_on_body, a); if (e) return e; + } + if (p->_proto_on_header) { + e = (*v)(p->_proto_on_header, a); if (e) return e; + } + if (p->_proto_on_headers_complete) { + e = (*v)(p->_proto_on_headers_complete, a); if (e) return e; + } + if (p->_proto_on_message_complete) { + e = (*v)(p->_proto_on_message_complete, a); if (e) return e; + } + if (p->_proto_on_chunk_header) { + e = (*v)(p->_proto_on_chunk_header, a); if (e) return e; + } + if (p->_proto_on_chunk_complete) { + e = (*v)(p->_proto_on_chunk_complete, a); if (e) return e; + } + if (p->_proto_on_message_begin) { + e = (*v)(p->_proto_on_message_begin, a); if (e) return e; + } + if (p->_last_error) { + e = (*v)(p->_last_error, a); if (e) return e; + } + if (p->py_buf.obj) { + e = (*v)(p->py_buf.obj, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_9httptools_6parser_6parser_HttpParser(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_9httptools_6parser_6parser_HttpParser *p = (struct __pyx_obj_9httptools_6parser_6parser_HttpParser *)o; + tmp = ((PyObject*)p->_proto_on_url); + p->_proto_on_url = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_proto_on_status); + p->_proto_on_status = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_proto_on_body); + p->_proto_on_body = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_proto_on_header); + p->_proto_on_header = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_proto_on_headers_complete); + p->_proto_on_headers_complete = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_proto_on_message_complete); + p->_proto_on_message_complete = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_proto_on_chunk_header); + p->_proto_on_chunk_header = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_proto_on_chunk_complete); + p->_proto_on_chunk_complete = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_proto_on_message_begin); + p->_proto_on_message_begin = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->_last_error); + p->_last_error = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + Py_CLEAR(p->py_buf.obj); + return 0; +} + +static PyMethodDef __pyx_methods_9httptools_6parser_6parser_HttpParser[] = { + {"get_http_version", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_10HttpParser_5get_http_version, METH_NOARGS, 0}, + {"should_keep_alive", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_10HttpParser_7should_keep_alive, METH_NOARGS, 0}, + {"should_upgrade", 
(PyCFunction)__pyx_pw_9httptools_6parser_6parser_10HttpParser_9should_upgrade, METH_NOARGS, 0}, + {"feed_data", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_10HttpParser_11feed_data, METH_O, 0}, + {"__reduce_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_10HttpParser_13__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_10HttpParser_15__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_9httptools_6parser_6parser_HttpParser = { + PyVarObject_HEAD_INIT(0, 0) + "httptools.parser.parser.HttpParser", /*tp_name*/ + sizeof(struct __pyx_obj_9httptools_6parser_6parser_HttpParser), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9httptools_6parser_6parser_HttpParser, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9httptools_6parser_6parser_HttpParser, /*tp_traverse*/ + __pyx_tp_clear_9httptools_6parser_6parser_HttpParser, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_9httptools_6parser_6parser_HttpParser, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9httptools_6parser_6parser_HttpParser, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; +static struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpRequestParser __pyx_vtable_9httptools_6parser_6parser_HttpRequestParser; + +static PyObject *__pyx_tp_new_9httptools_6parser_6parser_HttpRequestParser(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *p; + PyObject *o = __pyx_tp_new_9httptools_6parser_6parser_HttpParser(t, a, k); + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser *)o); + p->__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser*)__pyx_vtabptr_9httptools_6parser_6parser_HttpRequestParser; + return o; +} + +static PyMethodDef __pyx_methods_9httptools_6parser_6parser_HttpRequestParser[] = { + {"get_method", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_3get_method, METH_NOARGS, 0}, + {"__reduce_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_5__reduce_cython__, 
METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_7__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_9httptools_6parser_6parser_HttpRequestParser = { + PyVarObject_HEAD_INIT(0, 0) + "httptools.parser.parser.HttpRequestParser", /*tp_name*/ + sizeof(struct __pyx_obj_9httptools_6parser_6parser_HttpRequestParser), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9httptools_6parser_6parser_HttpParser, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9httptools_6parser_6parser_HttpParser, /*tp_traverse*/ + __pyx_tp_clear_9httptools_6parser_6parser_HttpParser, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_9httptools_6parser_6parser_HttpRequestParser, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_9httptools_6parser_6parser_17HttpRequestParser_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9httptools_6parser_6parser_HttpRequestParser, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; +static struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpResponseParser __pyx_vtable_9httptools_6parser_6parser_HttpResponseParser; + +static PyObject *__pyx_tp_new_9httptools_6parser_6parser_HttpResponseParser(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *p; + PyObject *o = __pyx_tp_new_9httptools_6parser_6parser_HttpParser(t, a, k); + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser *)o); + p->__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_9httptools_6parser_6parser_HttpParser*)__pyx_vtabptr_9httptools_6parser_6parser_HttpResponseParser; + return o; +} + +static PyMethodDef __pyx_methods_9httptools_6parser_6parser_HttpResponseParser[] = { + {"get_status_code", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_3get_status_code, METH_NOARGS, 0}, + {"__reduce_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_5__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_7__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static 
PyTypeObject __pyx_type_9httptools_6parser_6parser_HttpResponseParser = { + PyVarObject_HEAD_INIT(0, 0) + "httptools.parser.parser.HttpResponseParser", /*tp_name*/ + sizeof(struct __pyx_obj_9httptools_6parser_6parser_HttpResponseParser), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9httptools_6parser_6parser_HttpParser, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_9httptools_6parser_6parser_HttpParser, /*tp_traverse*/ + __pyx_tp_clear_9httptools_6parser_6parser_HttpParser, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_9httptools_6parser_6parser_HttpResponseParser, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_9httptools_6parser_6parser_18HttpResponseParser_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9httptools_6parser_6parser_HttpResponseParser, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec_parser(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec_parser}, + {0, NULL} +}; +#endif + +static struct PyModuleDef __pyx_moduledef = { + PyModuleDef_HEAD_INIT, + "parser", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_u_, __pyx_k_, sizeof(__pyx_k_), 0, 1, 0, 0}, + {&__pyx_n_s_BaseException, __pyx_k_BaseException, sizeof(__pyx_k_BaseException), 0, 0, 1, 1}, + 
{&__pyx_n_s_HttpParserCallbackError, __pyx_k_HttpParserCallbackError, sizeof(__pyx_k_HttpParserCallbackError), 0, 0, 1, 1}, + {&__pyx_n_s_HttpParserError, __pyx_k_HttpParserError, sizeof(__pyx_k_HttpParserError), 0, 0, 1, 1}, + {&__pyx_n_s_HttpParserInvalidMethodError, __pyx_k_HttpParserInvalidMethodError, sizeof(__pyx_k_HttpParserInvalidMethodError), 0, 0, 1, 1}, + {&__pyx_n_s_HttpParserInvalidStatusError, __pyx_k_HttpParserInvalidStatusError, sizeof(__pyx_k_HttpParserInvalidStatusError), 0, 0, 1, 1}, + {&__pyx_n_s_HttpParserInvalidURLError, __pyx_k_HttpParserInvalidURLError, sizeof(__pyx_k_HttpParserInvalidURLError), 0, 0, 1, 1}, + {&__pyx_n_s_HttpParserUpgrade, __pyx_k_HttpParserUpgrade, sizeof(__pyx_k_HttpParserUpgrade), 0, 0, 1, 1}, + {&__pyx_n_s_HttpRequestParser, __pyx_k_HttpRequestParser, sizeof(__pyx_k_HttpRequestParser), 0, 0, 1, 1}, + {&__pyx_n_u_HttpRequestParser, __pyx_k_HttpRequestParser, sizeof(__pyx_k_HttpRequestParser), 0, 1, 0, 1}, + {&__pyx_n_s_HttpResponseParser, __pyx_k_HttpResponseParser, sizeof(__pyx_k_HttpResponseParser), 0, 0, 1, 1}, + {&__pyx_n_u_HttpResponseParser, __pyx_k_HttpResponseParser, sizeof(__pyx_k_HttpResponseParser), 0, 1, 0, 1}, + {&__pyx_n_s_MemoryError, __pyx_k_MemoryError, sizeof(__pyx_k_MemoryError), 0, 0, 1, 1}, + {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, + {&__pyx_n_s_all, __pyx_k_all, sizeof(__pyx_k_all), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_context, __pyx_k_context, sizeof(__pyx_k_context), 0, 0, 1, 1}, + {&__pyx_n_s_errors, __pyx_k_errors, sizeof(__pyx_k_errors), 0, 0, 1, 1}, + {&__pyx_n_s_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 0, 1, 1}, + {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_kp_u_invalid_headers_state, __pyx_k_invalid_headers_state, sizeof(__pyx_k_invalid_headers_state), 0, 1, 0, 0}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_kp_s_no_default___reduce___due_to_non, __pyx_k_no_default___reduce___due_to_non, sizeof(__pyx_k_no_default___reduce___due_to_non), 0, 0, 1, 0}, + {&__pyx_n_u_on_body, __pyx_k_on_body, sizeof(__pyx_k_on_body), 0, 1, 0, 1}, + {&__pyx_n_u_on_chunk_complete, __pyx_k_on_chunk_complete, sizeof(__pyx_k_on_chunk_complete), 0, 1, 0, 1}, + {&__pyx_n_u_on_chunk_header, __pyx_k_on_chunk_header, sizeof(__pyx_k_on_chunk_header), 0, 1, 0, 1}, + {&__pyx_n_u_on_header, __pyx_k_on_header, sizeof(__pyx_k_on_header), 0, 1, 0, 1}, + {&__pyx_n_u_on_headers_complete, __pyx_k_on_headers_complete, sizeof(__pyx_k_on_headers_complete), 0, 1, 0, 1}, + {&__pyx_n_u_on_message_begin, __pyx_k_on_message_begin, sizeof(__pyx_k_on_message_begin), 0, 1, 0, 1}, + {&__pyx_n_u_on_message_complete, __pyx_k_on_message_complete, sizeof(__pyx_k_on_message_complete), 0, 1, 0, 1}, + {&__pyx_n_u_on_status, __pyx_k_on_status, sizeof(__pyx_k_on_status), 0, 1, 0, 1}, + {&__pyx_n_u_on_url, __pyx_k_on_url, sizeof(__pyx_k_on_url), 0, 1, 0, 1}, + {&__pyx_n_s_protocol, __pyx_k_protocol, sizeof(__pyx_k_protocol), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, + {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, + 
{&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, + {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_MemoryError = __Pyx_GetBuiltinName(__pyx_n_s_MemoryError); if (!__pyx_builtin_MemoryError) __PYX_ERR(0, 48, __pyx_L1_error) + __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(1, 2, __pyx_L1_error) + __pyx_builtin_BaseException = __Pyx_GetBuiltinName(__pyx_n_s_BaseException); if (!__pyx_builtin_BaseException) __PYX_ERR(0, 247, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "(tree fragment)":2 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + __pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__2); + __Pyx_GIVEREF(__pyx_tuple__2); + + /* "(tree fragment)":4 + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + */ + __pyx_tuple__3 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__3); + __Pyx_GIVEREF(__pyx_tuple__3); + + /* "(tree fragment)":2 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(1, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__4); + __Pyx_GIVEREF(__pyx_tuple__4); + + /* "(tree fragment)":4 + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + */ + __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__5); + __Pyx_GIVEREF(__pyx_tuple__5); + + /* "(tree fragment)":2 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + __pyx_tuple__6 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(1, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__6); + __Pyx_GIVEREF(__pyx_tuple__6); + + /* "(tree fragment)":4 + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, 
__pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + */ + __pyx_tuple__7 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__7); + __Pyx_GIVEREF(__pyx_tuple__7); + + /* "httptools/parser/parser.pyx":22 + * + * + * __all__ = ('HttpRequestParser', 'HttpResponseParser') # <<<<<<<<<<<<<< + * + * + */ + __pyx_tuple__8 = PyTuple_Pack(2, __pyx_n_u_HttpRequestParser, __pyx_n_u_HttpResponseParser); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(0, 22, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__8); + __Pyx_GIVEREF(__pyx_tuple__8); + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + __pyx_vtabptr_9httptools_6parser_6parser_HttpParser = &__pyx_vtable_9httptools_6parser_6parser_HttpParser; + __pyx_vtable_9httptools_6parser_6parser_HttpParser._init = (PyObject *(*)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *, PyObject *, llhttp_type_t))__pyx_f_9httptools_6parser_6parser_10HttpParser__init; + __pyx_vtable_9httptools_6parser_6parser_HttpParser._maybe_call_on_header = (PyObject *(*)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *))__pyx_f_9httptools_6parser_6parser_10HttpParser__maybe_call_on_header; + __pyx_vtable_9httptools_6parser_6parser_HttpParser._on_header_field = (PyObject *(*)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *, PyObject *))__pyx_f_9httptools_6parser_6parser_10HttpParser__on_header_field; + __pyx_vtable_9httptools_6parser_6parser_HttpParser._on_header_value = (PyObject *(*)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *, PyObject *))__pyx_f_9httptools_6parser_6parser_10HttpParser__on_header_value; + 
__pyx_vtable_9httptools_6parser_6parser_HttpParser._on_headers_complete = (PyObject *(*)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *))__pyx_f_9httptools_6parser_6parser_10HttpParser__on_headers_complete; + __pyx_vtable_9httptools_6parser_6parser_HttpParser._on_chunk_header = (PyObject *(*)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *))__pyx_f_9httptools_6parser_6parser_10HttpParser__on_chunk_header; + __pyx_vtable_9httptools_6parser_6parser_HttpParser._on_chunk_complete = (PyObject *(*)(struct __pyx_obj_9httptools_6parser_6parser_HttpParser *))__pyx_f_9httptools_6parser_6parser_10HttpParser__on_chunk_complete; + if (PyType_Ready(&__pyx_type_9httptools_6parser_6parser_HttpParser) < 0) __PYX_ERR(0, 26, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_9httptools_6parser_6parser_HttpParser.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_9httptools_6parser_6parser_HttpParser.tp_dictoffset && __pyx_type_9httptools_6parser_6parser_HttpParser.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_9httptools_6parser_6parser_HttpParser.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + if (__Pyx_SetVtable(__pyx_type_9httptools_6parser_6parser_HttpParser.tp_dict, __pyx_vtabptr_9httptools_6parser_6parser_HttpParser) < 0) __PYX_ERR(0, 26, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_9httptools_6parser_6parser_HttpParser) < 0) __PYX_ERR(0, 26, __pyx_L1_error) + __pyx_ptype_9httptools_6parser_6parser_HttpParser = &__pyx_type_9httptools_6parser_6parser_HttpParser; + __pyx_vtabptr_9httptools_6parser_6parser_HttpRequestParser = &__pyx_vtable_9httptools_6parser_6parser_HttpRequestParser; + __pyx_vtable_9httptools_6parser_6parser_HttpRequestParser.__pyx_base = *__pyx_vtabptr_9httptools_6parser_6parser_HttpParser; + __pyx_type_9httptools_6parser_6parser_HttpRequestParser.tp_base = __pyx_ptype_9httptools_6parser_6parser_HttpParser; + if (PyType_Ready(&__pyx_type_9httptools_6parser_6parser_HttpRequestParser) < 0) __PYX_ERR(0, 215, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_9httptools_6parser_6parser_HttpRequestParser.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_9httptools_6parser_6parser_HttpRequestParser.tp_dictoffset && __pyx_type_9httptools_6parser_6parser_HttpRequestParser.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_9httptools_6parser_6parser_HttpRequestParser.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + if (__Pyx_SetVtable(__pyx_type_9httptools_6parser_6parser_HttpRequestParser.tp_dict, __pyx_vtabptr_9httptools_6parser_6parser_HttpRequestParser) < 0) __PYX_ERR(0, 215, __pyx_L1_error) + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_HttpRequestParser, (PyObject *)&__pyx_type_9httptools_6parser_6parser_HttpRequestParser) < 0) __PYX_ERR(0, 215, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_9httptools_6parser_6parser_HttpRequestParser) < 0) __PYX_ERR(0, 215, __pyx_L1_error) + __pyx_ptype_9httptools_6parser_6parser_HttpRequestParser = &__pyx_type_9httptools_6parser_6parser_HttpRequestParser; + __pyx_vtabptr_9httptools_6parser_6parser_HttpResponseParser = &__pyx_vtable_9httptools_6parser_6parser_HttpResponseParser; + __pyx_vtable_9httptools_6parser_6parser_HttpResponseParser.__pyx_base = *__pyx_vtabptr_9httptools_6parser_6parser_HttpParser; + __pyx_type_9httptools_6parser_6parser_HttpResponseParser.tp_base = __pyx_ptype_9httptools_6parser_6parser_HttpParser; + if 
(PyType_Ready(&__pyx_type_9httptools_6parser_6parser_HttpResponseParser) < 0) __PYX_ERR(0, 229, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_9httptools_6parser_6parser_HttpResponseParser.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_9httptools_6parser_6parser_HttpResponseParser.tp_dictoffset && __pyx_type_9httptools_6parser_6parser_HttpResponseParser.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_9httptools_6parser_6parser_HttpResponseParser.tp_getattro = __Pyx_PyObject_GenericGetAttr; + } + if (__Pyx_SetVtable(__pyx_type_9httptools_6parser_6parser_HttpResponseParser.tp_dict, __pyx_vtabptr_9httptools_6parser_6parser_HttpResponseParser) < 0) __PYX_ERR(0, 229, __pyx_L1_error) + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_HttpResponseParser, (PyObject *)&__pyx_type_9httptools_6parser_6parser_HttpResponseParser) < 0) __PYX_ERR(0, 229, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_9httptools_6parser_6parser_HttpResponseParser) < 0) __PYX_ERR(0, 229, __pyx_L1_error) + __pyx_ptype_9httptools_6parser_6parser_HttpResponseParser = &__pyx_type_9httptools_6parser_6parser_HttpResponseParser; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", + #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_7cpython_4bool_bool = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "bool", sizeof(PyBoolObject), __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_7cpython_4bool_bool) __PYX_ERR(3, 8, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(4, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_7cpython_7complex_complex = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "complex", sizeof(PyComplexObject), __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_7cpython_7complex_complex) __PYX_ERR(4, 15, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import 
code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + +#ifndef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#elif PY_MAJOR_VERSION < 3 +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" void +#else +#define __Pyx_PyMODINIT_FUNC void +#endif +#else +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyObject * +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC initparser(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC initparser(void) +#else +__Pyx_PyMODINIT_FUNC PyInit_parser(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit_parser(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} +static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? -1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { + result = PyDict_SetItemString(moddict, to_name, value); + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec_parser(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, 
"Module 'parser' has already been imported. Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_parser(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + PyEval_InitThreads(); + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("parser", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_b); + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_cython_runtime); + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_httptools__parser__parser) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "httptools.parser.parser")) { + if (unlikely(PyDict_SetItemString(modules, "httptools.parser.parser", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + if (unlikely(__Pyx_modinit_type_init_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + if (unlikely(__Pyx_modinit_type_import_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "httptools/parser/parser.pyx":11 + * + * + * from .errors import (HttpParserError, # <<<<<<<<<<<<<< + * HttpParserCallbackError, + * HttpParserInvalidStatusError, + */ + __pyx_t_1 = PyList_New(6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_HttpParserError); + __Pyx_GIVEREF(__pyx_n_s_HttpParserError); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_HttpParserError); + __Pyx_INCREF(__pyx_n_s_HttpParserCallbackError); + __Pyx_GIVEREF(__pyx_n_s_HttpParserCallbackError); + PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_HttpParserCallbackError); + __Pyx_INCREF(__pyx_n_s_HttpParserInvalidStatusError); + __Pyx_GIVEREF(__pyx_n_s_HttpParserInvalidStatusError); + PyList_SET_ITEM(__pyx_t_1, 2, __pyx_n_s_HttpParserInvalidStatusError); + __Pyx_INCREF(__pyx_n_s_HttpParserInvalidMethodError); + __Pyx_GIVEREF(__pyx_n_s_HttpParserInvalidMethodError); + PyList_SET_ITEM(__pyx_t_1, 3, __pyx_n_s_HttpParserInvalidMethodError); + __Pyx_INCREF(__pyx_n_s_HttpParserInvalidURLError); + __Pyx_GIVEREF(__pyx_n_s_HttpParserInvalidURLError); + PyList_SET_ITEM(__pyx_t_1, 4, __pyx_n_s_HttpParserInvalidURLError); + __Pyx_INCREF(__pyx_n_s_HttpParserUpgrade); + __Pyx_GIVEREF(__pyx_n_s_HttpParserUpgrade); + PyList_SET_ITEM(__pyx_t_1, 5, __pyx_n_s_HttpParserUpgrade); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_errors, __pyx_t_1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpParserError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpParserError, __pyx_t_1) < 0) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpParserCallbackError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpParserCallbackError, __pyx_t_1) < 0) __PYX_ERR(0, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpParserInvalidStatusError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpParserInvalidStatusError, __pyx_t_1) < 0) __PYX_ERR(0, 13, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpParserInvalidMethodError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpParserInvalidMethodError, __pyx_t_1) < 0) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpParserInvalidURLError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpParserInvalidURLError, __pyx_t_1) < 0) __PYX_ERR(0, 15, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpParserUpgrade); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpParserUpgrade, __pyx_t_1) < 0) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "httptools/parser/parser.pyx":22 + * + * + * __all__ = ('HttpRequestParser', 'HttpResponseParser') # <<<<<<<<<<<<<< + * + * + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_all, __pyx_tuple__8) < 0) __PYX_ERR(0, 22, __pyx_L1_error) + + /* "httptools/parser/parser.pyx":1 + * #cython: language_level=3 # <<<<<<<<<<<<<< + * + * from __future__ import print_function + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init httptools.parser.parser", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_CLEAR(__pyx_m); + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init httptools.parser.parser"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 
0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* KeywordStringCheck */ +static int __Pyx_CheckKeywordStrings( + PyObject *kwdict, + const char* function_name, + int kw_allowed) +{ + PyObject* key = 0; + Py_ssize_t pos = 0; +#if CYTHON_COMPILING_IN_PYPY + if (!kw_allowed && PyDict_Next(kwdict, &pos, &key, 0)) + goto invalid_keyword; + return 1; +#else + while (PyDict_Next(kwdict, &pos, &key, 0)) { + #if PY_MAJOR_VERSION < 3 + if (unlikely(!PyString_Check(key))) + #endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } + if ((!kw_allowed) && unlikely(key)) + goto invalid_keyword; + return 1; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + return 0; +#endif +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif + return 0; +} + +/* PyErrExceptionMatches */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; icurexc_type; + if (exc_type == err) return 1; + if (unlikely(!exc_type)) return 0; + if (unlikely(PyTuple_Check(err))) + return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); + return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); +} +#endif + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* GetAttr */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { +#if CYTHON_USE_TYPE_SLOTS +#if PY_MAJOR_VERSION >= 3 + if (likely(PyUnicode_Check(n))) +#else + if (likely(PyString_Check(n))) +#endif + return __Pyx_PyObject_GetAttrStr(o, n); +#endif + return PyObject_GetAttr(o, n); +} + +/* GetAttr3 */ +static PyObject *__Pyx_GetAttr3Default(PyObject *d) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + return NULL; + __Pyx_PyErr_Clear(); + Py_INCREF(d); + return d; +} +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { + PyObject *r = __Pyx_GetAttr(o, n); + return (likely(r)) ? 
r : __Pyx_GetAttr3Default(d); +} + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif +#endif + +/* PyCFunctionFastCall */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int 
flags = PyCFunction_GET_FLAGS(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); + } else { + return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); + } +} +#endif + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = Py_TYPE(func)->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallNoArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, NULL, 0); + } +#endif +#ifdef __Pyx_CyFunction_USED + if (likely(PyCFunction_Check(func) || __Pyx_CyFunction_Check(func))) +#else + if (likely(PyCFunction_Check(func))) +#endif + { + if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { + return __Pyx_PyObject_CallMethO(func, NULL); + } + } + return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL); +} +#endif + +/* PyObjectCallOneArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (__Pyx_PyFastCFunction_Check(func)) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject 
*result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +#endif + +/* PyDictVersioning */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { + PyObject **dictptr = NULL; + Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; + if (offset) { +#if CYTHON_COMPILING_IN_CPYTHON + dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); +#else + dictptr = _PyObject_GetDictPtr(obj); +#endif + } + return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; +} +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) + return 0; + return obj_dict_version == __Pyx_get_object_dict_version(obj); +} +#endif + +/* GetModuleGlobalName */ +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#else + result = PyDict_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return __Pyx_GetBuiltinName(name); +} + +/* PyObjectCall2Args */ +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { + PyObject *args, *result = NULL; + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyFunction_FastCall(function, args, 2); + } + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyCFunction_FastCall(function, args, 2); + } + #endif + args = PyTuple_New(2); + if (unlikely(!args)) goto done; + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 0, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 1, arg2); + Py_INCREF(function); + result = __Pyx_PyObject_Call(function, args, NULL); + Py_DECREF(args); + Py_DECREF(function); +done: + return result; +} + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if 
(!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + 
return; +} +#endif + +/* GetException */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) +#endif +{ + PyObject *local_type, *local_value, *local_tb; +#if CYTHON_FAST_THREAD_STATE + PyObject *tmp_type, *tmp_value, *tmp_tb; + local_type = tstate->curexc_type; + local_value = tstate->curexc_value; + local_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(&local_type, &local_value, &local_tb); +#endif + PyErr_NormalizeException(&local_type, &local_value, &local_tb); +#if CYTHON_FAST_THREAD_STATE + if (unlikely(tstate->curexc_type)) +#else + if (unlikely(PyErr_Occurred())) +#endif + goto bad; + #if PY_MAJOR_VERSION >= 3 + if (local_tb) { + if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) + goto bad; + } + #endif + Py_XINCREF(local_tb); + Py_XINCREF(local_type); + Py_XINCREF(local_value); + *type = local_type; + *value = local_value; + *tb = local_tb; +#if CYTHON_FAST_THREAD_STATE + #if CYTHON_USE_EXC_INFO_STACK + { + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = local_type; + exc_info->exc_value = local_value; + exc_info->exc_traceback = local_tb; + } + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + *type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/* SwapException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = *type; + exc_info->exc_value = *value; + exc_info->exc_traceback = *tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = *type; + tstate->exc_value = *value; + tstate->exc_traceback = *tb; + #endif + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); + PyErr_SetExcInfo(*type, *value, *tb); + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#endif + +/* GetTopmostException */ +#if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * +__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) +{ + _PyErr_StackItem *exc_info = tstate->exc_info; + while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) && + exc_info->previous_item != NULL) + { + exc_info = exc_info->previous_item; + } + return exc_info; +} +#endif + +/* SaveResetException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void 
__Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); + *type = exc_info->exc_type; + *value = exc_info->exc_value; + *tb = exc_info->exc_traceback; + #else + *type = tstate->exc_type; + *value = tstate->exc_value; + *tb = tstate->exc_traceback; + #endif + Py_XINCREF(*type); + Py_XINCREF(*value); + Py_XINCREF(*tb); +} +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = type; + exc_info->exc_value = value; + exc_info->exc_traceback = tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = type; + tstate->exc_value = value; + tstate->exc_traceback = tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +#endif + +/* PyObjectSetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_setattro)) + return tp->tp_setattro(obj, attr_name, value); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_setattr)) + return tp->tp_setattr(obj, PyString_AS_STRING(attr_name), value); +#endif + return PyObject_SetAttr(obj, attr_name, value); +} +#endif + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 
1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* decode_c_bytes */ +static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes( + const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop, + const char* encoding, const char* errors, + PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { + if (unlikely((start < 0) | (stop < 0))) { + if (start < 0) { + start += length; + if (start < 0) + start = 0; + } + if (stop < 0) + stop += length; + } + if (stop > length) + stop = length; + if (unlikely(stop <= start)) + return __Pyx_NewRef(__pyx_empty_unicode); + length = stop - start; + cstring += start; + if (decode_func) { + return decode_func(cstring, length, errors); + } else { + return PyUnicode_Decode(cstring, length, encoding, errors); + } +} + +/* PyObject_GenericGetAttrNoDict */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'%.50s' object has no attribute '%U'", + tp->tp_name, attr_name); +#else + "'%.50s' object has no attribute '%.400s'", + tp->tp_name, PyString_AS_STRING(attr_name)); +#endif + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { + PyObject *descr; + PyTypeObject *tp = Py_TYPE(obj); + if (unlikely(!PyString_Check(attr_name))) { + return PyObject_GenericGetAttr(obj, attr_name); + } + assert(!tp->tp_dictoffset); + descr = _PyType_Lookup(tp, attr_name); + if (unlikely(!descr)) { + return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); + } + Py_INCREF(descr); + #if PY_MAJOR_VERSION < 3 + if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) + #endif + { + descrgetfunc f = Py_TYPE(descr)->tp_descr_get; + if (unlikely(f)) { + PyObject *res = f(descr, obj, (PyObject *)tp); + Py_DECREF(descr); + return res; + } + } + return descr; +} +#endif + +/* PyObject_GenericGetAttr */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { + if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { + return PyObject_GenericGetAttr(obj, attr_name); + } + 
return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); +} +#endif + +/* SetVTable */ +static int __Pyx_SetVtable(PyObject *dict, void *vtable) { +#if PY_VERSION_HEX >= 0x02070000 + PyObject *ob = PyCapsule_New(vtable, 0, 0); +#else + PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); +#endif + if (!ob) + goto bad; + if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0) + goto bad; + Py_DECREF(ob); + return 0; +bad: + Py_XDECREF(ob); + return -1; +} + +/* PyObjectGetAttrStrNoError */ +static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + __Pyx_PyErr_Clear(); +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { + PyObject *result; +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { + return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); + } +#endif + result = __Pyx_PyObject_GetAttrStr(obj, attr_name); + if (unlikely(!result)) { + __Pyx_PyObject_GetAttrStr_ClearAttributeError(); + } + return result; +} + +/* SetupReduce */ +static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { + int ret; + PyObject *name_attr; + name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); + if (likely(name_attr)) { + ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); + } else { + ret = -1; + } + if (unlikely(ret < 0)) { + PyErr_Clear(); + ret = 0; + } + Py_XDECREF(name_attr); + return ret; +} +static int __Pyx_setup_reduce(PyObject* type_obj) { + int ret = 0; + PyObject *object_reduce = NULL; + PyObject *object_reduce_ex = NULL; + PyObject *reduce = NULL; + PyObject *reduce_ex = NULL; + PyObject *reduce_cython = NULL; + PyObject *setstate = NULL; + PyObject *setstate_cython = NULL; +#if CYTHON_USE_PYTYPE_LOOKUP + if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD; +#else + if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD; +#endif +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#else + object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#endif + reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; + if (reduce_ex == object_reduce_ex) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#else + object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#endif + reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; + if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { + reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); + if (likely(reduce_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (reduce == object_reduce || PyErr_Occurred()) { + goto __PYX_BAD; + } + 
setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); + if (!setstate) PyErr_Clear(); + if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { + setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); + if (likely(setstate_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (!setstate || PyErr_Occurred()) { + goto __PYX_BAD; + } + } + PyType_Modified((PyTypeObject*)type_obj); + } + } + goto __PYX_GOOD; +__PYX_BAD: + if (!PyErr_Occurred()) + PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); + ret = -1; +__PYX_GOOD: +#if !CYTHON_USE_PYTYPE_LOOKUP + Py_XDECREF(object_reduce); + Py_XDECREF(object_reduce_ex); +#endif + Py_XDECREF(reduce); + Py_XDECREF(reduce_ex); + Py_XDECREF(reduce_cython); + Py_XDECREF(setstate); + Py_XDECREF(setstate_cython); + return ret; +} + +/* TypeImport */ +#ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, + size_t size, enum __Pyx_ImportType_CheckSize check_size) +{ + PyObject *result = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + result = PyObject_GetAttrString(module, class_name); + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if ((size_t)basicsize < size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. 
" + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(result); + return NULL; +} +#endif + +/* Import */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) { + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +/* ImportFrom */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = NULL; + PyObject *py_funcname = NULL; + #if PY_MAJOR_VERSION < 3 + PyObject *py_srcfile = NULL; + py_srcfile = PyString_FromString(filename); + if (!py_srcfile) goto bad; + #endif + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, 
__pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + funcname = PyUnicode_AsUTF8(py_funcname); + if (!funcname) goto bad; + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + if (!py_funcname) goto bad; + #endif + } + #if PY_MAJOR_VERSION < 3 + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + #else + py_code = PyCode_NewEmpty(filename, funcname, py_line); + #endif + Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline + return py_code; +bad: + Py_XDECREF(py_funcname); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_srcfile); + #endif + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? -c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint8_t(uint8_t value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const uint8_t neg_one = (uint8_t) -1, const_zero = (uint8_t) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(uint8_t) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(uint8_t) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(uint8_t) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(uint8_t) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(uint8_t) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(uint8_t), + little, !is_unsigned); + } +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const int neg_one = (int) -1, const_zero = (int) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC 
+#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(int) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(int) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(int) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(int), + little, !is_unsigned); + } +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_ptrdiff_t(ptrdiff_t value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const ptrdiff_t neg_one = (ptrdiff_t) -1, const_zero = (ptrdiff_t) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(ptrdiff_t) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(ptrdiff_t) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(ptrdiff_t) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(ptrdiff_t) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(ptrdiff_t) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(ptrdiff_t), + little, !is_unsigned); + } +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint16_t(uint16_t value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const uint16_t neg_one = (uint16_t) -1, const_zero = (uint16_t) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(uint16_t) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(uint16_t) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(uint16_t) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(uint16_t) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(uint16_t) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(uint16_t), + little, !is_unsigned); + } +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long 
neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * 
sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return 
(long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const int neg_one = (int) -1, const_zero = (int) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, 
unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * 
PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; + if (!res) { + res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } + return res; +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i<n; i++) { + if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1; + } +#endif + for (i=0; i<n; i++) { + PyObject *t = PyTuple_GET_ITEM(tuple, i); + #if PY_MAJOR_VERSION < 3 + if (likely(exc_type == t)) return 1; + #endif + if (likely(PyExceptionClass_Check(t))) { + if (__Pyx_inner_PyErr_GivenExceptionMatches2(exc_type, NULL, t)) return 1; + } else { + } + } + return 0; +} +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) { + if (likely(err == exc_type)) return 1; + if (likely(PyExceptionClass_Check(err))) { + if (likely(PyExceptionClass_Check(exc_type))) { + return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type); + } else if (likely(PyTuple_Check(exc_type))) { + return __Pyx_PyErr_GivenExceptionMatchesTuple(err, exc_type); + } else { + } + } + return PyErr_GivenExceptionMatches(err, exc_type); +} +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) { + assert(PyExceptionClass_Check(exc_type1)); + assert(PyExceptionClass_Check(exc_type2)); + if (likely(err == exc_type1 || err == exc_type2)) return 1; + if (likely(PyExceptionClass_Check(err))) { + return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2); + } + return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2)); +} +#endif + +/* CheckBinaryVersion */ +static int __Pyx_check_binary_version(void) { + char ctversion[4], rtversion[4]; + PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); + PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); + if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + return PyErr_WarnEx(NULL, message, 1); + } + return 0; +} + +/* InitStrings */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = 
PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). " + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? 
digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { + if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { + return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); +#if PY_MAJOR_VERSION < 3 + } else if (likely(PyInt_CheckExact(o))) { + return PyInt_AS_LONG(o); +#endif + } else { + Py_ssize_t ival; + PyObject *x; + x = PyNumber_Index(o); + if (!x) return -1; + ival = PyInt_AsLong(x); + Py_DECREF(x); + return ival; + } +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/parser.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/parser.cp37-win_amd64.pyd new file mode 100644 index 00000000..7967a492 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/parser.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/url_parser.c b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/url_parser.c new file mode 100644 index 00000000..de5d5b4c --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/url_parser.c @@ -0,0 +1,6031 @@ +/* Generated by Cython 0.29.28 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "depends": [], + "extra_compile_args": [ + "-O2" + ], + "name": "httptools.parser.url_parser", + "sources": [ + "httptools/parser/url_parser.pyx" + ] + }, + "module_name": "httptools.parser.url_parser" +} +END: Cython Metadata */ + +#ifndef PY_SSIZE_T_CLEAN +#define PY_SSIZE_T_CLEAN +#endif /* PY_SSIZE_T_CLEAN */ +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. 
+#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) + #error Cython requires Python 2.6+ or Python 3.3+. +#else +#define CYTHON_ABI "0_29_28" +#define CYTHON_HEX_VERSION 0x001D1CF0 +#define CYTHON_FUTURE_DIVISION 1 +#include <stddef.h> +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #if PY_VERSION_HEX >= 0x02070000 + #define HAVE_LONG_LONG + #endif +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #if PY_VERSION_HEX < 0x03050000 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 + #undef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT 0 + #undef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE 0 + #undef CYTHON_USE_DICT_VERSIONS + 
#define CYTHON_USE_DICT_VERSIONS 0 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYTYPE_LOOKUP + #define CYTHON_USE_PYTYPE_LOOKUP 0 + #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) + #define CYTHON_USE_PYTYPE_LOOKUP 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #if PY_VERSION_HEX >= 0x030B00A4 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #elif !defined(CYTHON_FAST_THREAD_STATE) + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030B00A1) + #endif + #ifndef CYTHON_PEP489_MULTI_PHASE_INIT + #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) + #endif + #ifndef CYTHON_USE_TP_FINALIZE + #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) + #endif + #ifndef CYTHON_USE_DICT_VERSIONS + #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) + #endif + #if PY_VERSION_HEX >= 0x030B00A4 + #undef CYTHON_USE_EXC_INFO_STACK + #define CYTHON_USE_EXC_INFO_STACK 0 + #elif !defined(CYTHON_USE_EXC_INFO_STACK) + #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #if PY_MAJOR_VERSION < 3 + #include "longintrepr.h" + #endif + #undef SHIFT + #undef BASE + #undef MASK + #ifdef SIZEOF_VOID_P + enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; + #endif +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# 
define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if defined(__cplusplus) + template<typename T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int32 uint32_t; + #endif + #endif +#else + #include <stdint.h> +#endif +#ifndef CYTHON_FALLTHROUGH + #if defined(__cplusplus) && __cplusplus >= 201103L + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #elif __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #elif __has_cpp_attribute(gnu::fallthrough) + #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif + #if defined(__clang__ ) && defined(__apple_build_version__) + #if __apple_build_version__ < 7000000 + #undef CYTHON_FALLTHROUGH + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif + +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_DefaultClassType PyType_Type +#if PY_VERSION_HEX >= 0x030B00A1 + static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyObject *kwds=NULL, *argcount=NULL, *posonlyargcount=NULL, *kwonlyargcount=NULL; + PyObject *nlocals=NULL, *stacksize=NULL, *flags=NULL, *replace=NULL, *call_result=NULL, *empty=NULL; + const char *fn_cstr=NULL; + const char *name_cstr=NULL; + PyCodeObject* co=NULL; + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + if (!(kwds=PyDict_New())) goto end; + if (!(argcount=PyLong_FromLong(a))) goto end; + if (PyDict_SetItemString(kwds, "co_argcount", argcount) != 0) goto end; + if (!(posonlyargcount=PyLong_FromLong(0))) goto end; + if (PyDict_SetItemString(kwds, "co_posonlyargcount", posonlyargcount) != 0) goto end; + if (!(kwonlyargcount=PyLong_FromLong(k))) goto end; + if (PyDict_SetItemString(kwds, "co_kwonlyargcount", kwonlyargcount) != 0) 
goto end; + if (!(nlocals=PyLong_FromLong(l))) goto end; + if (PyDict_SetItemString(kwds, "co_nlocals", nlocals) != 0) goto end; + if (!(stacksize=PyLong_FromLong(s))) goto end; + if (PyDict_SetItemString(kwds, "co_stacksize", stacksize) != 0) goto end; + if (!(flags=PyLong_FromLong(f))) goto end; + if (PyDict_SetItemString(kwds, "co_flags", flags) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_code", code) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_consts", c) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_names", n) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_varnames", v) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_freevars", fv) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_cellvars", cell) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_linetable", lnos) != 0) goto end; + if (!(fn_cstr=PyUnicode_AsUTF8AndSize(fn, NULL))) goto end; + if (!(name_cstr=PyUnicode_AsUTF8AndSize(name, NULL))) goto end; + if (!(co = PyCode_NewEmpty(fn_cstr, name_cstr, fline))) goto end; + if (!(replace = PyObject_GetAttrString((PyObject*)co, "replace"))) goto cleanup_code_too; + if (!(empty = PyTuple_New(0))) goto cleanup_code_too; // unfortunately __pyx_empty_tuple isn't available here + if (!(call_result = PyObject_Call(replace, empty, kwds))) goto cleanup_code_too; + Py_XDECREF((PyObject*)co); + co = (PyCodeObject*)call_result; + call_result = NULL; + if (0) { + cleanup_code_too: + Py_XDECREF((PyObject*)co); + co = NULL; + } + end: + Py_XDECREF(kwds); + Py_XDECREF(argcount); + Py_XDECREF(posonlyargcount); + Py_XDECREF(kwonlyargcount); + Py_XDECREF(nlocals); + Py_XDECREF(stacksize); + Py_XDECREF(replace); + Py_XDECREF(call_result); + Py_XDECREF(empty); + if (type) { + PyErr_Restore(type, value, traceback); + } + return co; + } +#else + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) +#endif + #define __Pyx_DefaultClassType PyType_Type +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef METH_STACKLESS + #define METH_STACKLESS 0 +#endif +#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast + #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 + #define PyMem_RawMalloc(n) PyMem_Malloc(n) + #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) + #define 
PyMem_RawFree(p) PyMem_Free(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#elif PY_VERSION_HEX >= 0x03060000 + #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() +#elif PY_VERSION_HEX >= 0x03000000 + #define __Pyx_PyThreadState_Current PyThreadState_GET() +#else + #define __Pyx_PyThreadState_Current _PyThreadState_Current +#endif +#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) +#include "pythread.h" +#define Py_tss_NEEDS_INIT 0 +typedef int Py_tss_t; +static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { + *key = PyThread_create_key(); + return 0; +} +static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { + Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); + *key = Py_tss_NEEDS_INIT; + return key; +} +static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { + PyObject_Free(key); +} +static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { + return *key != Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { + PyThread_delete_key(*key); + *key = Py_tss_NEEDS_INIT; +} +static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { + return PyThread_set_key_value(*key, value); +} +static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { + return PyThread_get_key_value(*key); +} +#endif +#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) +#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) +#else +#define __Pyx_PyDict_NewPresized(n) PyDict_New() +#endif +#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS +#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) +#else +#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #if defined(PyUnicode_IS_READY) + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #else + #define __Pyx_PyUnicode_READY(op) (0) + #endif + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE) + #if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000 + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length)) + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) + #endif + #else + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) + #endif +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#ifndef PyObject_Unicode + #define PyObject_Unicode PyObject_Str +#endif +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#if PY_VERSION_HEX >= 0x030900A4 + #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) +#else + #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) + #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) +#endif +#if CYTHON_ASSUME_SAFE_MACROS + #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) +#else + #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define 
PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef __Pyx_PyAsyncMethodsStruct + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include <math.h> +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + +#define __PYX_MARK_ERR_POS(f_index, lineno) \ + { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } +#define __PYX_ERR(f_index, lineno, Ln_error) \ + { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE__httptools__parser__url_parser +#define __PYX_HAVE_API__httptools__parser__url_parser +/* Early includes */ +#include <string.h> +#include <stdio.h> +#include "pythread.h" +#include <stdint.h> +#include "http_parser.h" +#ifdef _OPENMP +#include <omp.h> +#endif /* _OPENMP */ + +#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == 
(type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { + return (size_t) i < (size_t) limit; +} +#if defined (__cplusplus) && __cplusplus >= 201103L + #include <cstdlib> + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) + #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode 
+#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +#define __Pyx_PySequence_Tuple(obj)\ + (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", 
NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ +static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +static PyObject *__pyx_m = NULL; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_cython_runtime = NULL; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + + +static const char *__pyx_f[] = { + "stringsource", + "httptools\\parser\\url_parser.pyx", + "type.pxd", + "bool.pxd", + "complex.pxd", +}; + +/*--- Type declarations ---*/ +struct __pyx_obj_9httptools_6parser_10url_parser_URL; + +/* "httptools/parser/url_parser.pyx":16 + * + * @cython.freelist(250) + * cdef class URL: # <<<<<<<<<<<<<< + * cdef readonly bytes schema + * cdef readonly bytes host + */ +struct __pyx_obj_9httptools_6parser_10url_parser_URL { + PyObject_HEAD + PyObject *schema; + PyObject *host; + PyObject *port; + PyObject *path; + PyObject *query; + PyObject *fragment; + PyObject *userinfo; +}; + + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) 
!= NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* ArgTypeTest.proto */ +#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\ + ((likely((Py_TYPE(obj) == type) | (none_allowed && (obj == Py_None)))) ? 
1 :\ + __Pyx__ArgTypeTest(obj, type, name, exact)) +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact); + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); +#else +#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif +#define __Pyx_BUILD_ASSERT_EXPR(cond)\ + (sizeof(char [1 - 2*!(cond)]) - 1) +#ifndef Py_MEMBER_SIZE +#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) +#endif +#if CYTHON_FAST_PYCALL + static size_t __pyx_pyframe_localsplus_offset = 0; + #include "frameobject.h" + #define __Pxy_PyFrame_Initialize_Offsets()\ + ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ + (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) + #define __Pyx_PyFrame_GetLocalsplus(frame)\ + (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif // CYTHON_FAST_PYCALL +#endif + +/* PyCFunctionFastCall.proto */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; +#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#define __Pyx_PyErr_Occurred() PyErr_Occurred() +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) +#else +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#endif +#else +#define __Pyx_PyErr_Clear() PyErr_Clear() +#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, 
tb) +#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* PyDictVersioning.proto */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) +#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ + (version_var) = __PYX_GET_DICT_VERSION(dict);\ + (cache_var) = (value); +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ + (VAR) = __pyx_dict_cached_value;\ + } else {\ + (VAR) = __pyx_dict_cached_value = (LOOKUP);\ + __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ + }\ +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); +#else +#define __PYX_GET_DICT_VERSION(dict) (0) +#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) +#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); +#endif + +/* GetModuleGlobalName.proto */ +#if CYTHON_USE_DICT_VERSIONS +#define __Pyx_GetModuleGlobalName(var, name) {\ + static PY_UINT64_T __pyx_dict_version = 0;\ + static PyObject *__pyx_dict_cached_value = NULL;\ + (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ + (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ + __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} +#define __Pyx_GetModuleGlobalNameUncached(var, name) {\ + PY_UINT64_T __pyx_dict_version;\ + PyObject *__pyx_dict_cached_value;\ + (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ +} +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); +#else +#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) +#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); +#endif + +/* PyObjectCall2Args.proto */ +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* GetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* SwapException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* GetTopmostException.proto */ +#if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate); +#endif + +/* SaveResetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +#else +#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) +#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) +#endif + +/* IncludeStringH.proto */ +#include <string.h> + +/* PyObject_GenericGetAttrNoDict.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr +#endif + +/* PyObject_GenericGetAttr.proto */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); +#else +#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr +#endif + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) 
__Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* PyObjectGetAttrStrNoError.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); + +/* SetupReduce.proto */ +static int __Pyx_setup_reduce(PyObject* type_obj); + +/* TypeImport.proto */ +#ifndef __PYX_HAVE_RT_ImportType_proto +#define __PYX_HAVE_RT_ImportType_proto +enum __Pyx_ImportType_CheckSize { + __Pyx_ImportType_CheckSize_Error = 0, + __Pyx_ImportType_CheckSize_Warn = 1, + __Pyx_ImportType_CheckSize_Ignore = 2 +}; +static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); +#endif + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* CLineInTraceback.proto */ +#ifdef CYTHON_CLINE_IN_TRACEBACK +#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0) +#else +static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); +#endif + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* GCCDiagnostics.proto */ +#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)) +#define __Pyx_HAS_GCC_DIAGNOSTIC +#endif + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint16_t(uint16_t value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* FastTypeChecks.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); +#else +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) +#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) +#endif +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + + +/* 
Module declarations from 'cpython.mem' */ + +/* Module declarations from 'cpython.version' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from 'cpython.ref' */ + +/* Module declarations from 'cpython.exc' */ + +/* Module declarations from 'cpython.module' */ + +/* Module declarations from 'cpython.tuple' */ + +/* Module declarations from 'cpython.list' */ + +/* Module declarations from 'cpython.sequence' */ + +/* Module declarations from 'cpython.mapping' */ + +/* Module declarations from 'cpython.iterator' */ + +/* Module declarations from 'cpython.number' */ + +/* Module declarations from 'cpython.int' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.bool' */ +static PyTypeObject *__pyx_ptype_7cpython_4bool_bool = 0; + +/* Module declarations from 'cpython.long' */ + +/* Module declarations from 'cpython.float' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.complex' */ +static PyTypeObject *__pyx_ptype_7cpython_7complex_complex = 0; + +/* Module declarations from 'cpython.string' */ + +/* Module declarations from 'cpython.unicode' */ + +/* Module declarations from 'cpython.dict' */ + +/* Module declarations from 'cpython.instance' */ + +/* Module declarations from 'cpython.function' */ + +/* Module declarations from 'cpython.method' */ + +/* Module declarations from 'cpython.weakref' */ + +/* Module declarations from 'cpython.getargs' */ + +/* Module declarations from 'cpython.pythread' */ + +/* Module declarations from 'cpython.pystate' */ + +/* Module declarations from 'cpython.cobject' */ + +/* Module declarations from 'cpython.oldbuffer' */ + +/* Module declarations from 'cpython.set' */ + +/* Module declarations from 'cpython.buffer' */ + +/* Module declarations from 'cpython.bytes' */ + +/* Module declarations from 'cpython.pycapsule' */ + +/* Module declarations from 'cpython' */ + +/* Module declarations from 'cython' */ + +/* Module declarations from 'httptools.parser' */ + +/* Module declarations from 'libc.stdint' */ + +/* Module declarations from 'httptools.parser.url_cparser' */ + +/* Module declarations from 'httptools.parser.url_parser' */ +static PyTypeObject *__pyx_ptype_9httptools_6parser_10url_parser_URL = 0; +#define __Pyx_MODULE_NAME "httptools.parser.url_parser" +extern int __pyx_module_is_main_httptools__parser__url_parser; +int __pyx_module_is_main_httptools__parser__url_parser = 0; + +/* Implementation of 'httptools.parser.url_parser' */ +static PyObject *__pyx_builtin_TypeError; +static const char __pyx_k_ln[] = "ln"; +static const char __pyx_k_URL[] = "URL"; +static const char __pyx_k_all[] = "__all__"; +static const char __pyx_k_off[] = "off"; +static const char __pyx_k_res[] = "res"; +static const char __pyx_k_url[] = "url"; +static const char __pyx_k_host[] = "host"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_name[] = "__name__"; +static const char __pyx_k_path[] = "path"; +static const char __pyx_k_port[] = "port"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_query[] = "query"; +static const char __pyx_k_errors[] = "errors"; +static const char __pyx_k_format[] = "format"; +static const char __pyx_k_import[] = "__import__"; +static 
const char __pyx_k_parsed[] = "parsed"; +static const char __pyx_k_py_buf[] = "py_buf"; +static const char __pyx_k_reduce[] = "__reduce__"; +static const char __pyx_k_result[] = "result"; +static const char __pyx_k_schema[] = "schema"; +static const char __pyx_k_buf_data[] = "buf_data"; +static const char __pyx_k_fragment[] = "fragment"; +static const char __pyx_k_getstate[] = "__getstate__"; +static const char __pyx_k_setstate[] = "__setstate__"; +static const char __pyx_k_userinfo[] = "userinfo"; +static const char __pyx_k_TypeError[] = "TypeError"; +static const char __pyx_k_parse_url[] = "parse_url"; +static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; +static const char __pyx_k_invalid_url_r[] = "invalid url {!r}"; +static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; +static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_HttpParserInvalidURLError[] = "HttpParserInvalidURLError"; +static const char __pyx_k_httptools_parser_url_parser[] = "httptools.parser.url_parser"; +static const char __pyx_k_URL_schema_r_host_r_port_r_path[] = "<URL schema: {!r}, host: {!r}, port: {!r}, path: {!r}, query: {!r}, fragment: {!r}, userinfo: {!r}>"; +static const char __pyx_k_httptools_parser_url_parser_pyx[] = "httptools\\parser\\url_parser.pyx"; +static const char __pyx_k_no_default___reduce___due_to_non[] = "no default __reduce__ due to non-trivial __cinit__"; +static PyObject *__pyx_n_s_HttpParserInvalidURLError; +static PyObject *__pyx_n_s_TypeError; +static PyObject *__pyx_n_s_URL; +static PyObject *__pyx_kp_u_URL_schema_r_host_r_port_r_path; +static PyObject *__pyx_n_s_all; +static PyObject *__pyx_n_s_buf_data; +static PyObject *__pyx_n_s_cline_in_traceback; +static PyObject *__pyx_n_s_errors; +static PyObject *__pyx_n_s_format; +static PyObject *__pyx_n_s_fragment; +static PyObject *__pyx_n_s_getstate; +static PyObject *__pyx_n_s_host; +static PyObject *__pyx_n_s_httptools_parser_url_parser; +static PyObject *__pyx_kp_s_httptools_parser_url_parser_pyx; +static PyObject *__pyx_n_s_import; +static PyObject *__pyx_kp_u_invalid_url_r; +static PyObject *__pyx_n_s_ln; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_name; +static PyObject *__pyx_kp_s_no_default___reduce___due_to_non; +static PyObject *__pyx_n_s_off; +static PyObject *__pyx_n_s_parse_url; +static PyObject *__pyx_n_u_parse_url; +static PyObject *__pyx_n_s_parsed; +static PyObject *__pyx_n_s_path; +static PyObject *__pyx_n_s_port; +static PyObject *__pyx_n_s_py_buf; +static PyObject *__pyx_n_s_query; +static PyObject *__pyx_n_s_reduce; +static PyObject *__pyx_n_s_reduce_cython; +static PyObject *__pyx_n_s_reduce_ex; +static PyObject *__pyx_n_s_res; +static PyObject *__pyx_n_s_result; +static PyObject *__pyx_n_s_schema; +static PyObject *__pyx_n_s_setstate; +static PyObject *__pyx_n_s_setstate_cython; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_n_s_url; +static PyObject *__pyx_n_s_userinfo; +static int __pyx_pf_9httptools_6parser_10url_parser_3URL___cinit__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self, PyObject *__pyx_v_schema, PyObject *__pyx_v_host, PyObject *__pyx_v_port, PyObject *__pyx_v_path, PyObject *__pyx_v_query, PyObject *__pyx_v_fragment, PyObject *__pyx_v_userinfo); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_2__repr__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_6schema___get__(struct 
__pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4host___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4port___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4path___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_5query___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_8fragment___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_8userinfo___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_6__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_parse_url(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_url); /* proto */ +static PyObject *__pyx_tp_new_9httptools_6parser_10url_parser_URL(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tuple_; +static PyObject *__pyx_tuple__2; +static PyObject *__pyx_tuple__3; +static PyObject *__pyx_tuple__4; +static PyObject *__pyx_codeobj__5; +/* Late includes */ + +/* "httptools/parser/url_parser.pyx":25 + * cdef readonly bytes userinfo + * + * def __cinit__(self, bytes schema, bytes host, object port, bytes path, # <<<<<<<<<<<<<< + * bytes query, bytes fragment, bytes userinfo): + * + */ + +/* Python wrapper */ +static int __pyx_pw_9httptools_6parser_10url_parser_3URL_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_9httptools_6parser_10url_parser_3URL_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_schema = 0; + PyObject *__pyx_v_host = 0; + PyObject *__pyx_v_port = 0; + PyObject *__pyx_v_path = 0; + PyObject *__pyx_v_query = 0; + PyObject *__pyx_v_fragment = 0; + PyObject *__pyx_v_userinfo = 0; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_schema,&__pyx_n_s_host,&__pyx_n_s_port,&__pyx_n_s_path,&__pyx_n_s_query,&__pyx_n_s_fragment,&__pyx_n_s_userinfo,0}; + PyObject* values[7] = {0,0,0,0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + CYTHON_FALLTHROUGH; + case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + CYTHON_FALLTHROUGH; + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + 
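+      /* Note: this switch intentionally falls through from the highest positional-argument count down to 0, copying every positional argument the caller supplied into values[]; the keyword switch below then fills in the parameters that were not passed positionally. */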
CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_schema)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_host)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 7, 7, 1); __PYX_ERR(1, 25, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_port)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 7, 7, 2); __PYX_ERR(1, 25, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_path)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 7, 7, 3); __PYX_ERR(1, 25, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_query)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 7, 7, 4); __PYX_ERR(1, 25, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 5: + if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_fragment)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 7, 7, 5); __PYX_ERR(1, 25, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 6: + if (likely((values[6] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_userinfo)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 7, 7, 6); __PYX_ERR(1, 25, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__cinit__") < 0)) __PYX_ERR(1, 25, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 7) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + values[5] = PyTuple_GET_ITEM(__pyx_args, 5); + values[6] = PyTuple_GET_ITEM(__pyx_args, 6); + } + __pyx_v_schema = ((PyObject*)values[0]); + __pyx_v_host = ((PyObject*)values[1]); + __pyx_v_port = values[2]; + __pyx_v_path = ((PyObject*)values[3]); + __pyx_v_query = ((PyObject*)values[4]); + __pyx_v_fragment = ((PyObject*)values[5]); + __pyx_v_userinfo = ((PyObject*)values[6]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 7, 7, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 25, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("httptools.parser.url_parser.URL.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_schema), (&PyBytes_Type), 1, "schema", 1))) __PYX_ERR(1, 25, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_host), (&PyBytes_Type), 1, "host", 1))) __PYX_ERR(1, 25, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_path), (&PyBytes_Type), 1, "path", 1))) __PYX_ERR(1, 25, __pyx_L1_error) + if 
(unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_query), (&PyBytes_Type), 1, "query", 1))) __PYX_ERR(1, 26, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_fragment), (&PyBytes_Type), 1, "fragment", 1))) __PYX_ERR(1, 26, __pyx_L1_error) + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_userinfo), (&PyBytes_Type), 1, "userinfo", 1))) __PYX_ERR(1, 26, __pyx_L1_error) + __pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL___cinit__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self), __pyx_v_schema, __pyx_v_host, __pyx_v_port, __pyx_v_path, __pyx_v_query, __pyx_v_fragment, __pyx_v_userinfo); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_9httptools_6parser_10url_parser_3URL___cinit__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self, PyObject *__pyx_v_schema, PyObject *__pyx_v_host, PyObject *__pyx_v_port, PyObject *__pyx_v_path, PyObject *__pyx_v_query, PyObject *__pyx_v_fragment, PyObject *__pyx_v_userinfo) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__cinit__", 0); + + /* "httptools/parser/url_parser.pyx":28 + * bytes query, bytes fragment, bytes userinfo): + * + * self.schema = schema # <<<<<<<<<<<<<< + * self.host = host + * self.port = port + */ + __Pyx_INCREF(__pyx_v_schema); + __Pyx_GIVEREF(__pyx_v_schema); + __Pyx_GOTREF(__pyx_v_self->schema); + __Pyx_DECREF(__pyx_v_self->schema); + __pyx_v_self->schema = __pyx_v_schema; + + /* "httptools/parser/url_parser.pyx":29 + * + * self.schema = schema + * self.host = host # <<<<<<<<<<<<<< + * self.port = port + * self.path = path + */ + __Pyx_INCREF(__pyx_v_host); + __Pyx_GIVEREF(__pyx_v_host); + __Pyx_GOTREF(__pyx_v_self->host); + __Pyx_DECREF(__pyx_v_self->host); + __pyx_v_self->host = __pyx_v_host; + + /* "httptools/parser/url_parser.pyx":30 + * self.schema = schema + * self.host = host + * self.port = port # <<<<<<<<<<<<<< + * self.path = path + * self.query = query + */ + __Pyx_INCREF(__pyx_v_port); + __Pyx_GIVEREF(__pyx_v_port); + __Pyx_GOTREF(__pyx_v_self->port); + __Pyx_DECREF(__pyx_v_self->port); + __pyx_v_self->port = __pyx_v_port; + + /* "httptools/parser/url_parser.pyx":31 + * self.host = host + * self.port = port + * self.path = path # <<<<<<<<<<<<<< + * self.query = query + * self.fragment = fragment + */ + __Pyx_INCREF(__pyx_v_path); + __Pyx_GIVEREF(__pyx_v_path); + __Pyx_GOTREF(__pyx_v_self->path); + __Pyx_DECREF(__pyx_v_self->path); + __pyx_v_self->path = __pyx_v_path; + + /* "httptools/parser/url_parser.pyx":32 + * self.port = port + * self.path = path + * self.query = query # <<<<<<<<<<<<<< + * self.fragment = fragment + * self.userinfo = userinfo + */ + __Pyx_INCREF(__pyx_v_query); + __Pyx_GIVEREF(__pyx_v_query); + __Pyx_GOTREF(__pyx_v_self->query); + __Pyx_DECREF(__pyx_v_self->query); + __pyx_v_self->query = __pyx_v_query; + + /* "httptools/parser/url_parser.pyx":33 + * self.path = path + * self.query = query + * self.fragment = fragment # <<<<<<<<<<<<<< + * self.userinfo = userinfo + * + */ + __Pyx_INCREF(__pyx_v_fragment); + __Pyx_GIVEREF(__pyx_v_fragment); + __Pyx_GOTREF(__pyx_v_self->fragment); + __Pyx_DECREF(__pyx_v_self->fragment); + __pyx_v_self->fragment = __pyx_v_fragment; + + /* "httptools/parser/url_parser.pyx":34 + * self.query = query + * self.fragment = fragment + * self.userinfo = userinfo # <<<<<<<<<<<<<< + * + * def __repr__(self): + */ + __Pyx_INCREF(__pyx_v_userinfo); + 
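+  /* Note: this store, like the six attribute stores above it, takes the new reference (INCREF) before dropping the old one (DECREF), so the old value cannot be deallocated while still reachable and self.userinfo = self.userinfo stays safe; the GIVEREF/GOTREF pair is RefNanny bookkeeping that compiles away in release builds. */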
__Pyx_GIVEREF(__pyx_v_userinfo); + __Pyx_GOTREF(__pyx_v_self->userinfo); + __Pyx_DECREF(__pyx_v_self->userinfo); + __pyx_v_self->userinfo = __pyx_v_userinfo; + + /* "httptools/parser/url_parser.pyx":25 + * cdef readonly bytes userinfo + * + * def __cinit__(self, bytes schema, bytes host, object port, bytes path, # <<<<<<<<<<<<<< + * bytes query, bytes fragment, bytes userinfo): + * + */ + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/url_parser.pyx":36 + * self.userinfo = userinfo + * + * def __repr__(self): # <<<<<<<<<<<<<< + * return ('<URL schema: {!r}, host: {!r}, port: {!r}, path: {!r}, ' + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_3__repr__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_3__repr__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_2__repr__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_2__repr__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__repr__", 0); + + /* "httptools/parser/url_parser.pyx":37 + * + * def __repr__(self): + * return ('<URL schema: {!r}, host: {!r}, port: {!r}, path: {!r}, ' # <<<<<<<<<<<<<< + * 'query: {!r}, fragment: {!r}, userinfo: {!r}>' + * .format(self.schema, self.host, self.port, self.path, + */ + __Pyx_XDECREF(__pyx_r); + + /* "httptools/parser/url_parser.pyx":39 + * return ('<URL schema: {!r}, host: {!r}, port: {!r}, path: {!r}, ' + * 'query: {!r}, fragment: {!r}, userinfo: {!r}>' + * .format(self.schema, self.host, self.port, self.path, # <<<<<<<<<<<<<< + * self.query, self.fragment, self.userinfo)) + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_kp_u_URL_schema_r_host_r_port_r_path, __pyx_n_s_format); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 39, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + + /* "httptools/parser/url_parser.pyx":40 + * 'query: {!r}, fragment: {!r}, userinfo: {!r}>' + * .format(self.schema, self.host, self.port, self.path, + * self.query, self.fragment, self.userinfo)) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_4 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[8] = {__pyx_t_3, __pyx_v_self->schema, __pyx_v_self->host, __pyx_v_self->port, __pyx_v_self->path, __pyx_v_self->query, __pyx_v_self->fragment, __pyx_v_self->userinfo}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 7+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 39, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[8] = {__pyx_t_3, __pyx_v_self->schema, __pyx_v_self->host, __pyx_v_self->port, __pyx_v_self->path, __pyx_v_self->query, __pyx_v_self->fragment, __pyx_v_self->userinfo}; 
+ __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 7+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 39, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_5 = PyTuple_New(7+__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 39, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (__pyx_t_3) { + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL; + } + __Pyx_INCREF(__pyx_v_self->schema); + __Pyx_GIVEREF(__pyx_v_self->schema); + PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_4, __pyx_v_self->schema); + __Pyx_INCREF(__pyx_v_self->host); + __Pyx_GIVEREF(__pyx_v_self->host); + PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_4, __pyx_v_self->host); + __Pyx_INCREF(__pyx_v_self->port); + __Pyx_GIVEREF(__pyx_v_self->port); + PyTuple_SET_ITEM(__pyx_t_5, 2+__pyx_t_4, __pyx_v_self->port); + __Pyx_INCREF(__pyx_v_self->path); + __Pyx_GIVEREF(__pyx_v_self->path); + PyTuple_SET_ITEM(__pyx_t_5, 3+__pyx_t_4, __pyx_v_self->path); + __Pyx_INCREF(__pyx_v_self->query); + __Pyx_GIVEREF(__pyx_v_self->query); + PyTuple_SET_ITEM(__pyx_t_5, 4+__pyx_t_4, __pyx_v_self->query); + __Pyx_INCREF(__pyx_v_self->fragment); + __Pyx_GIVEREF(__pyx_v_self->fragment); + PyTuple_SET_ITEM(__pyx_t_5, 5+__pyx_t_4, __pyx_v_self->fragment); + __Pyx_INCREF(__pyx_v_self->userinfo); + __Pyx_GIVEREF(__pyx_v_self->userinfo); + PyTuple_SET_ITEM(__pyx_t_5, 6+__pyx_t_4, __pyx_v_self->userinfo); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 39, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "httptools/parser/url_parser.pyx":36 + * self.userinfo = userinfo + * + * def __repr__(self): # <<<<<<<<<<<<<< + * return ('<URL schema: {!r}, host: {!r}, port: {!r}, path: {!r}, ' + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("httptools.parser.url_parser.URL.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/url_parser.pyx":17 + * @cython.freelist(250) + * cdef class URL: + * cdef readonly bytes schema # <<<<<<<<<<<<<< + * cdef readonly bytes host + * cdef readonly object port + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_6schema_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_6schema_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_6schema___get__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_6schema___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->schema); + __pyx_r = __pyx_v_self->schema; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return 
__pyx_r; +} + +/* "httptools/parser/url_parser.pyx":18 + * cdef class URL: + * cdef readonly bytes schema + * cdef readonly bytes host # <<<<<<<<<<<<<< + * cdef readonly object port + * cdef readonly bytes path + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_4host_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_4host_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_4host___get__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4host___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->host); + __pyx_r = __pyx_v_self->host; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/url_parser.pyx":19 + * cdef readonly bytes schema + * cdef readonly bytes host + * cdef readonly object port # <<<<<<<<<<<<<< + * cdef readonly bytes path + * cdef readonly bytes query + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_4port_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_4port_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_4port___get__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4port___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->port); + __pyx_r = __pyx_v_self->port; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/url_parser.pyx":20 + * cdef readonly bytes host + * cdef readonly object port + * cdef readonly bytes path # <<<<<<<<<<<<<< + * cdef readonly bytes query + * cdef readonly bytes fragment + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_4path_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_4path_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_4path___get__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4path___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) { + PyObject *__pyx_r = NULL; + 
__Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->path); + __pyx_r = __pyx_v_self->path; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/url_parser.pyx":21 + * cdef readonly object port + * cdef readonly bytes path + * cdef readonly bytes query # <<<<<<<<<<<<<< + * cdef readonly bytes fragment + * cdef readonly bytes userinfo + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_5query_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_5query_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_5query___get__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_5query___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->query); + __pyx_r = __pyx_v_self->query; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/url_parser.pyx":22 + * cdef readonly bytes path + * cdef readonly bytes query + * cdef readonly bytes fragment # <<<<<<<<<<<<<< + * cdef readonly bytes userinfo + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_8fragment_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_8fragment_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_8fragment___get__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_8fragment___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->fragment); + __pyx_r = __pyx_v_self->fragment; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/url_parser.pyx":23 + * cdef readonly bytes query + * cdef readonly bytes fragment + * cdef readonly bytes userinfo # <<<<<<<<<<<<<< + * + * def __cinit__(self, bytes schema, bytes host, object port, bytes path, + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_8userinfo_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_8userinfo_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = 
__pyx_pf_9httptools_6parser_10url_parser_3URL_8userinfo___get__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_8userinfo___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->userinfo); + __pyx_r = __pyx_v_self->userinfo; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_4__reduce_cython__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":2 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 2, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("httptools.parser.url_parser.URL.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject 
*__pyx_pw_9httptools_6parser_10url_parser_3URL_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_6__setstate_cython__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_6__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":4 + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + */ + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 4, __pyx_L1_error) + + /* "(tree fragment)":3 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("httptools.parser.url_parser.URL.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "httptools/parser/url_parser.pyx":43 + * + * + * def parse_url(url): # <<<<<<<<<<<<<< + * cdef: + * Py_buffer py_buf + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_1parse_url(PyObject *__pyx_self, PyObject *__pyx_v_url); /*proto*/ +static PyMethodDef __pyx_mdef_9httptools_6parser_10url_parser_1parse_url = {"parse_url", (PyCFunction)__pyx_pw_9httptools_6parser_10url_parser_1parse_url, METH_O, 0}; +static PyObject *__pyx_pw_9httptools_6parser_10url_parser_1parse_url(PyObject *__pyx_self, PyObject *__pyx_v_url) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("parse_url (wrapper)", 0); + __pyx_r = __pyx_pf_9httptools_6parser_10url_parser_parse_url(__pyx_self, ((PyObject *)__pyx_v_url)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_9httptools_6parser_10url_parser_parse_url(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_url) { + Py_buffer __pyx_v_py_buf; + char *__pyx_v_buf_data; + struct http_parser_url *__pyx_v_parsed; + int __pyx_v_res; + PyObject *__pyx_v_schema = 0; + PyObject *__pyx_v_host = 0; + PyObject *__pyx_v_port = 0; + PyObject *__pyx_v_path = 0; + PyObject *__pyx_v_query = 0; + PyObject *__pyx_v_fragment = 0; + PyObject *__pyx_v_userinfo = 0; + CYTHON_UNUSED PyObject *__pyx_v_result = 0; + int __pyx_v_off; + int __pyx_v_ln; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int 
__pyx_t_2; + uint16_t __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + int __pyx_t_9; + char const *__pyx_t_10; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("parse_url", 0); + + /* "httptools/parser/url_parser.pyx":49 + * uparser.http_parser_url* parsed + * int res + * bytes schema = None # <<<<<<<<<<<<<< + * bytes host = None + * object port = None + */ + __Pyx_INCREF(Py_None); + __pyx_v_schema = ((PyObject*)Py_None); + + /* "httptools/parser/url_parser.pyx":50 + * int res + * bytes schema = None + * bytes host = None # <<<<<<<<<<<<<< + * object port = None + * bytes path = None + */ + __Pyx_INCREF(Py_None); + __pyx_v_host = ((PyObject*)Py_None); + + /* "httptools/parser/url_parser.pyx":51 + * bytes schema = None + * bytes host = None + * object port = None # <<<<<<<<<<<<<< + * bytes path = None + * bytes query = None + */ + __Pyx_INCREF(Py_None); + __pyx_v_port = Py_None; + + /* "httptools/parser/url_parser.pyx":52 + * bytes host = None + * object port = None + * bytes path = None # <<<<<<<<<<<<<< + * bytes query = None + * bytes fragment = None + */ + __Pyx_INCREF(Py_None); + __pyx_v_path = ((PyObject*)Py_None); + + /* "httptools/parser/url_parser.pyx":53 + * object port = None + * bytes path = None + * bytes query = None # <<<<<<<<<<<<<< + * bytes fragment = None + * bytes userinfo = None + */ + __Pyx_INCREF(Py_None); + __pyx_v_query = ((PyObject*)Py_None); + + /* "httptools/parser/url_parser.pyx":54 + * bytes path = None + * bytes query = None + * bytes fragment = None # <<<<<<<<<<<<<< + * bytes userinfo = None + * object result = None + */ + __Pyx_INCREF(Py_None); + __pyx_v_fragment = ((PyObject*)Py_None); + + /* "httptools/parser/url_parser.pyx":55 + * bytes query = None + * bytes fragment = None + * bytes userinfo = None # <<<<<<<<<<<<<< + * object result = None + * int off + */ + __Pyx_INCREF(Py_None); + __pyx_v_userinfo = ((PyObject*)Py_None); + + /* "httptools/parser/url_parser.pyx":56 + * bytes fragment = None + * bytes userinfo = None + * object result = None # <<<<<<<<<<<<<< + * int off + * int ln + */ + __Pyx_INCREF(Py_None); + __pyx_v_result = Py_None; + + /* "httptools/parser/url_parser.pyx":60 + * int ln + * + * parsed = <uparser.http_parser_url*> \ # <<<<<<<<<<<<<< + * PyMem_Malloc(sizeof(uparser.http_parser_url)) + * uparser.http_parser_url_init(parsed) + */ + __pyx_v_parsed = ((struct http_parser_url *)PyMem_Malloc((sizeof(struct http_parser_url)))); + + /* "httptools/parser/url_parser.pyx":62 + * parsed = <uparser.http_parser_url*> \ + * PyMem_Malloc(sizeof(uparser.http_parser_url)) + * uparser.http_parser_url_init(parsed) # <<<<<<<<<<<<<< + * + * PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) + */ + http_parser_url_init(__pyx_v_parsed); + + /* "httptools/parser/url_parser.pyx":64 + * uparser.http_parser_url_init(parsed) + * + * PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) # <<<<<<<<<<<<<< + * try: + * buf_data = <char*>py_buf.buf + */ + __pyx_t_1 = PyObject_GetBuffer(__pyx_v_url, (&__pyx_v_py_buf), PyBUF_SIMPLE); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(1, 64, __pyx_L1_error) + + /* "httptools/parser/url_parser.pyx":65 + * + * PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) + * try: # <<<<<<<<<<<<<< + * buf_data = <char*>py_buf.buf + * res = 
uparser.http_parser_parse_url(buf_data, py_buf.len, 0, parsed) + */ + /*try:*/ { + + /* "httptools/parser/url_parser.pyx":66 + * PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) + * try: + * buf_data = <char*>py_buf.buf # <<<<<<<<<<<<<< + * res = uparser.http_parser_parse_url(buf_data, py_buf.len, 0, parsed) + * + */ + __pyx_v_buf_data = ((char *)__pyx_v_py_buf.buf); + + /* "httptools/parser/url_parser.pyx":67 + * try: + * buf_data = <char*>py_buf.buf + * res = uparser.http_parser_parse_url(buf_data, py_buf.len, 0, parsed) # <<<<<<<<<<<<<< + * + * if res == 0: + */ + __pyx_v_res = http_parser_parse_url(__pyx_v_buf_data, __pyx_v_py_buf.len, 0, __pyx_v_parsed); + + /* "httptools/parser/url_parser.pyx":69 + * res = uparser.http_parser_parse_url(buf_data, py_buf.len, 0, parsed) + * + * if res == 0: # <<<<<<<<<<<<<< + * if parsed.field_set & (1 << uparser.UF_SCHEMA): + * off = parsed.field_data[uparser.UF_SCHEMA].off + */ + __pyx_t_2 = ((__pyx_v_res == 0) != 0); + if (likely(__pyx_t_2)) { + + /* "httptools/parser/url_parser.pyx":70 + * + * if res == 0: + * if parsed.field_set & (1 << uparser.UF_SCHEMA): # <<<<<<<<<<<<<< + * off = parsed.field_data[uparser.UF_SCHEMA].off + * ln = parsed.field_data[uparser.UF_SCHEMA].len + */ + __pyx_t_2 = ((__pyx_v_parsed->field_set & (1 << UF_SCHEMA)) != 0); + if (__pyx_t_2) { + + /* "httptools/parser/url_parser.pyx":71 + * if res == 0: + * if parsed.field_set & (1 << uparser.UF_SCHEMA): + * off = parsed.field_data[uparser.UF_SCHEMA].off # <<<<<<<<<<<<<< + * ln = parsed.field_data[uparser.UF_SCHEMA].len + * schema = buf_data[off:off+ln] + */ + __pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_SCHEMA)]).off; + __pyx_v_off = __pyx_t_3; + + /* "httptools/parser/url_parser.pyx":72 + * if parsed.field_set & (1 << uparser.UF_SCHEMA): + * off = parsed.field_data[uparser.UF_SCHEMA].off + * ln = parsed.field_data[uparser.UF_SCHEMA].len # <<<<<<<<<<<<<< + * schema = buf_data[off:off+ln] + * + */ + __pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_SCHEMA)]).len; + __pyx_v_ln = __pyx_t_3; + + /* "httptools/parser/url_parser.pyx":73 + * off = parsed.field_data[uparser.UF_SCHEMA].off + * ln = parsed.field_data[uparser.UF_SCHEMA].len + * schema = buf_data[off:off+ln] # <<<<<<<<<<<<<< + * + * if parsed.field_set & (1 << uparser.UF_HOST): + */ + __pyx_t_4 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_buf_data + __pyx_v_off, (__pyx_v_off + __pyx_v_ln) - __pyx_v_off); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 73, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_schema, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "httptools/parser/url_parser.pyx":70 + * + * if res == 0: + * if parsed.field_set & (1 << uparser.UF_SCHEMA): # <<<<<<<<<<<<<< + * off = parsed.field_data[uparser.UF_SCHEMA].off + * ln = parsed.field_data[uparser.UF_SCHEMA].len + */ + } + + /* "httptools/parser/url_parser.pyx":75 + * schema = buf_data[off:off+ln] + * + * if parsed.field_set & (1 << uparser.UF_HOST): # <<<<<<<<<<<<<< + * off = parsed.field_data[uparser.UF_HOST].off + * ln = parsed.field_data[uparser.UF_HOST].len + */ + __pyx_t_2 = ((__pyx_v_parsed->field_set & (1 << UF_HOST)) != 0); + if (__pyx_t_2) { + + /* "httptools/parser/url_parser.pyx":76 + * + * if parsed.field_set & (1 << uparser.UF_HOST): + * off = parsed.field_data[uparser.UF_HOST].off # <<<<<<<<<<<<<< + * ln = parsed.field_data[uparser.UF_HOST].len + * host = buf_data[off:off+ln] + */ + __pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_HOST)]).off; + __pyx_v_off = __pyx_t_3; + + /* "httptools/parser/url_parser.pyx":77 + * if parsed.field_set & 
(1 << uparser.UF_HOST): + * off = parsed.field_data[uparser.UF_HOST].off + * ln = parsed.field_data[uparser.UF_HOST].len # <<<<<<<<<<<<<< + * host = buf_data[off:off+ln] + * + */ + __pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_HOST)]).len; + __pyx_v_ln = __pyx_t_3; + + /* "httptools/parser/url_parser.pyx":78 + * off = parsed.field_data[uparser.UF_HOST].off + * ln = parsed.field_data[uparser.UF_HOST].len + * host = buf_data[off:off+ln] # <<<<<<<<<<<<<< + * + * if parsed.field_set & (1 << uparser.UF_PORT): + */ + __pyx_t_4 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_buf_data + __pyx_v_off, (__pyx_v_off + __pyx_v_ln) - __pyx_v_off); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 78, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_host, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "httptools/parser/url_parser.pyx":75 + * schema = buf_data[off:off+ln] + * + * if parsed.field_set & (1 << uparser.UF_HOST): # <<<<<<<<<<<<<< + * off = parsed.field_data[uparser.UF_HOST].off + * ln = parsed.field_data[uparser.UF_HOST].len + */ + } + + /* "httptools/parser/url_parser.pyx":80 + * host = buf_data[off:off+ln] + * + * if parsed.field_set & (1 << uparser.UF_PORT): # <<<<<<<<<<<<<< + * port = parsed.port + * + */ + __pyx_t_2 = ((__pyx_v_parsed->field_set & (1 << UF_PORT)) != 0); + if (__pyx_t_2) { + + /* "httptools/parser/url_parser.pyx":81 + * + * if parsed.field_set & (1 << uparser.UF_PORT): + * port = parsed.port # <<<<<<<<<<<<<< + * + * if parsed.field_set & (1 << uparser.UF_PATH): + */ + __pyx_t_4 = __Pyx_PyInt_From_uint16_t(__pyx_v_parsed->port); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 81, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_port, __pyx_t_4); + __pyx_t_4 = 0; + + /* "httptools/parser/url_parser.pyx":80 + * host = buf_data[off:off+ln] + * + * if parsed.field_set & (1 << uparser.UF_PORT): # <<<<<<<<<<<<<< + * port = parsed.port + * + */ + } + + /* "httptools/parser/url_parser.pyx":83 + * port = parsed.port + * + * if parsed.field_set & (1 << uparser.UF_PATH): # <<<<<<<<<<<<<< + * off = parsed.field_data[uparser.UF_PATH].off + * ln = parsed.field_data[uparser.UF_PATH].len + */ + __pyx_t_2 = ((__pyx_v_parsed->field_set & (1 << UF_PATH)) != 0); + if (__pyx_t_2) { + + /* "httptools/parser/url_parser.pyx":84 + * + * if parsed.field_set & (1 << uparser.UF_PATH): + * off = parsed.field_data[uparser.UF_PATH].off # <<<<<<<<<<<<<< + * ln = parsed.field_data[uparser.UF_PATH].len + * path = buf_data[off:off+ln] + */ + __pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_PATH)]).off; + __pyx_v_off = __pyx_t_3; + + /* "httptools/parser/url_parser.pyx":85 + * if parsed.field_set & (1 << uparser.UF_PATH): + * off = parsed.field_data[uparser.UF_PATH].off + * ln = parsed.field_data[uparser.UF_PATH].len # <<<<<<<<<<<<<< + * path = buf_data[off:off+ln] + * + */ + __pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_PATH)]).len; + __pyx_v_ln = __pyx_t_3; + + /* "httptools/parser/url_parser.pyx":86 + * off = parsed.field_data[uparser.UF_PATH].off + * ln = parsed.field_data[uparser.UF_PATH].len + * path = buf_data[off:off+ln] # <<<<<<<<<<<<<< + * + * if parsed.field_set & (1 << uparser.UF_QUERY): + */ + __pyx_t_4 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_buf_data + __pyx_v_off, (__pyx_v_off + __pyx_v_ln) - __pyx_v_off); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 86, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_path, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "httptools/parser/url_parser.pyx":83 + * port = parsed.port + * + * if parsed.field_set & (1 << 
uparser.UF_PATH): # <<<<<<<<<<<<<< + * off = parsed.field_data[uparser.UF_PATH].off + * ln = parsed.field_data[uparser.UF_PATH].len + */ + } + + /* "httptools/parser/url_parser.pyx":88 + * path = buf_data[off:off+ln] + * + * if parsed.field_set & (1 << uparser.UF_QUERY): # <<<<<<<<<<<<<< + * off = parsed.field_data[uparser.UF_QUERY].off + * ln = parsed.field_data[uparser.UF_QUERY].len + */ + __pyx_t_2 = ((__pyx_v_parsed->field_set & (1 << UF_QUERY)) != 0); + if (__pyx_t_2) { + + /* "httptools/parser/url_parser.pyx":89 + * + * if parsed.field_set & (1 << uparser.UF_QUERY): + * off = parsed.field_data[uparser.UF_QUERY].off # <<<<<<<<<<<<<< + * ln = parsed.field_data[uparser.UF_QUERY].len + * query = buf_data[off:off+ln] + */ + __pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_QUERY)]).off; + __pyx_v_off = __pyx_t_3; + + /* "httptools/parser/url_parser.pyx":90 + * if parsed.field_set & (1 << uparser.UF_QUERY): + * off = parsed.field_data[uparser.UF_QUERY].off + * ln = parsed.field_data[uparser.UF_QUERY].len # <<<<<<<<<<<<<< + * query = buf_data[off:off+ln] + * + */ + __pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_QUERY)]).len; + __pyx_v_ln = __pyx_t_3; + + /* "httptools/parser/url_parser.pyx":91 + * off = parsed.field_data[uparser.UF_QUERY].off + * ln = parsed.field_data[uparser.UF_QUERY].len + * query = buf_data[off:off+ln] # <<<<<<<<<<<<<< + * + * if parsed.field_set & (1 << uparser.UF_FRAGMENT): + */ + __pyx_t_4 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_buf_data + __pyx_v_off, (__pyx_v_off + __pyx_v_ln) - __pyx_v_off); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 91, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_query, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "httptools/parser/url_parser.pyx":88 + * path = buf_data[off:off+ln] + * + * if parsed.field_set & (1 << uparser.UF_QUERY): # <<<<<<<<<<<<<< + * off = parsed.field_data[uparser.UF_QUERY].off + * ln = parsed.field_data[uparser.UF_QUERY].len + */ + } + + /* "httptools/parser/url_parser.pyx":93 + * query = buf_data[off:off+ln] + * + * if parsed.field_set & (1 << uparser.UF_FRAGMENT): # <<<<<<<<<<<<<< + * off = parsed.field_data[uparser.UF_FRAGMENT].off + * ln = parsed.field_data[uparser.UF_FRAGMENT].len + */ + __pyx_t_2 = ((__pyx_v_parsed->field_set & (1 << UF_FRAGMENT)) != 0); + if (__pyx_t_2) { + + /* "httptools/parser/url_parser.pyx":94 + * + * if parsed.field_set & (1 << uparser.UF_FRAGMENT): + * off = parsed.field_data[uparser.UF_FRAGMENT].off # <<<<<<<<<<<<<< + * ln = parsed.field_data[uparser.UF_FRAGMENT].len + * fragment = buf_data[off:off+ln] + */ + __pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_FRAGMENT)]).off; + __pyx_v_off = __pyx_t_3; + + /* "httptools/parser/url_parser.pyx":95 + * if parsed.field_set & (1 << uparser.UF_FRAGMENT): + * off = parsed.field_data[uparser.UF_FRAGMENT].off + * ln = parsed.field_data[uparser.UF_FRAGMENT].len # <<<<<<<<<<<<<< + * fragment = buf_data[off:off+ln] + * + */ + __pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_FRAGMENT)]).len; + __pyx_v_ln = __pyx_t_3; + + /* "httptools/parser/url_parser.pyx":96 + * off = parsed.field_data[uparser.UF_FRAGMENT].off + * ln = parsed.field_data[uparser.UF_FRAGMENT].len + * fragment = buf_data[off:off+ln] # <<<<<<<<<<<<<< + * + * if parsed.field_set & (1 << uparser.UF_USERINFO): + */ + __pyx_t_4 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_buf_data + __pyx_v_off, (__pyx_v_off + __pyx_v_ln) - __pyx_v_off); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 96, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_fragment, 
((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "httptools/parser/url_parser.pyx":93 + * query = buf_data[off:off+ln] + * + * if parsed.field_set & (1 << uparser.UF_FRAGMENT): # <<<<<<<<<<<<<< + * off = parsed.field_data[uparser.UF_FRAGMENT].off + * ln = parsed.field_data[uparser.UF_FRAGMENT].len + */ + } + + /* "httptools/parser/url_parser.pyx":98 + * fragment = buf_data[off:off+ln] + * + * if parsed.field_set & (1 << uparser.UF_USERINFO): # <<<<<<<<<<<<<< + * off = parsed.field_data[uparser.UF_USERINFO].off + * ln = parsed.field_data[uparser.UF_USERINFO].len + */ + __pyx_t_2 = ((__pyx_v_parsed->field_set & (1 << UF_USERINFO)) != 0); + if (__pyx_t_2) { + + /* "httptools/parser/url_parser.pyx":99 + * + * if parsed.field_set & (1 << uparser.UF_USERINFO): + * off = parsed.field_data[uparser.UF_USERINFO].off # <<<<<<<<<<<<<< + * ln = parsed.field_data[uparser.UF_USERINFO].len + * userinfo = buf_data[off:off+ln] + */ + __pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_USERINFO)]).off; + __pyx_v_off = __pyx_t_3; + + /* "httptools/parser/url_parser.pyx":100 + * if parsed.field_set & (1 << uparser.UF_USERINFO): + * off = parsed.field_data[uparser.UF_USERINFO].off + * ln = parsed.field_data[uparser.UF_USERINFO].len # <<<<<<<<<<<<<< + * userinfo = buf_data[off:off+ln] + * + */ + __pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_USERINFO)]).len; + __pyx_v_ln = __pyx_t_3; + + /* "httptools/parser/url_parser.pyx":101 + * off = parsed.field_data[uparser.UF_USERINFO].off + * ln = parsed.field_data[uparser.UF_USERINFO].len + * userinfo = buf_data[off:off+ln] # <<<<<<<<<<<<<< + * + * return URL(schema, host, port, path, query, fragment, userinfo) + */ + __pyx_t_4 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_buf_data + __pyx_v_off, (__pyx_v_off + __pyx_v_ln) - __pyx_v_off); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 101, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF_SET(__pyx_v_userinfo, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "httptools/parser/url_parser.pyx":98 + * fragment = buf_data[off:off+ln] + * + * if parsed.field_set & (1 << uparser.UF_USERINFO): # <<<<<<<<<<<<<< + * off = parsed.field_data[uparser.UF_USERINFO].off + * ln = parsed.field_data[uparser.UF_USERINFO].len + */ + } + + /* "httptools/parser/url_parser.pyx":103 + * userinfo = buf_data[off:off+ln] + * + * return URL(schema, host, port, path, query, fragment, userinfo) # <<<<<<<<<<<<<< + * else: + * raise HttpParserInvalidURLError("invalid url {!r}".format(url)) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = PyTuple_New(7); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 103, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_INCREF(__pyx_v_schema); + __Pyx_GIVEREF(__pyx_v_schema); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_schema); + __Pyx_INCREF(__pyx_v_host); + __Pyx_GIVEREF(__pyx_v_host); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_host); + __Pyx_INCREF(__pyx_v_port); + __Pyx_GIVEREF(__pyx_v_port); + PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_port); + __Pyx_INCREF(__pyx_v_path); + __Pyx_GIVEREF(__pyx_v_path); + PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_v_path); + __Pyx_INCREF(__pyx_v_query); + __Pyx_GIVEREF(__pyx_v_query); + PyTuple_SET_ITEM(__pyx_t_4, 4, __pyx_v_query); + __Pyx_INCREF(__pyx_v_fragment); + __Pyx_GIVEREF(__pyx_v_fragment); + PyTuple_SET_ITEM(__pyx_t_4, 5, __pyx_v_fragment); + __Pyx_INCREF(__pyx_v_userinfo); + __Pyx_GIVEREF(__pyx_v_userinfo); + PyTuple_SET_ITEM(__pyx_t_4, 6, __pyx_v_userinfo); + __pyx_t_5 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_9httptools_6parser_10url_parser_URL), __pyx_t_4, NULL); if (unlikely(!__pyx_t_5)) 
__PYX_ERR(1, 103, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L3_return; + + /* "httptools/parser/url_parser.pyx":69 + * res = uparser.http_parser_parse_url(buf_data, py_buf.len, 0, parsed) + * + * if res == 0: # <<<<<<<<<<<<<< + * if parsed.field_set & (1 << uparser.UF_SCHEMA): + * off = parsed.field_data[uparser.UF_SCHEMA].off + */ + } + + /* "httptools/parser/url_parser.pyx":105 + * return URL(schema, host, port, path, query, fragment, userinfo) + * else: + * raise HttpParserInvalidURLError("invalid url {!r}".format(url)) # <<<<<<<<<<<<<< + * finally: + * PyBuffer_Release(&py_buf) + */ + /*else*/ { + __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_HttpParserInvalidURLError); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 105, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_kp_u_invalid_url_r, __pyx_n_s_format); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 105, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + __pyx_t_6 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_v_url) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_url); + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 105, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + __pyx_t_5 = (__pyx_t_7) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_7, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 105, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_Raise(__pyx_t_5, 0, 0, 0); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __PYX_ERR(1, 105, __pyx_L4_error) + } + } + + /* "httptools/parser/url_parser.pyx":107 + * raise HttpParserInvalidURLError("invalid url {!r}".format(url)) + * finally: + * PyBuffer_Release(&py_buf) # <<<<<<<<<<<<<< + * PyMem_Free(parsed) + */ + /*finally:*/ { + __pyx_L4_error:; + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_14, &__pyx_t_15, &__pyx_t_16); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_11, &__pyx_t_12, &__pyx_t_13) < 0)) __Pyx_ErrFetch(&__pyx_t_11, &__pyx_t_12, &__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + __pyx_t_1 = __pyx_lineno; __pyx_t_9 = __pyx_clineno; __pyx_t_10 = __pyx_filename; + { + PyBuffer_Release((&__pyx_v_py_buf)); + + /* "httptools/parser/url_parser.pyx":108 + * finally: + * PyBuffer_Release(&py_buf) + * PyMem_Free(parsed) # <<<<<<<<<<<<<< + */ + PyMem_Free(__pyx_v_parsed); + } + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ExceptionReset(__pyx_t_14, __pyx_t_15, __pyx_t_16); + } + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_ErrRestore(__pyx_t_11, __pyx_t_12, __pyx_t_13); + __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; + __pyx_lineno = __pyx_t_1; __pyx_clineno = __pyx_t_9; __pyx_filename = __pyx_t_10; + goto __pyx_L1_error; + } + __pyx_L3_return: { + __pyx_t_16 = __pyx_r; + __pyx_r = 0; + + /* "httptools/parser/url_parser.pyx":107 + * raise HttpParserInvalidURLError("invalid url {!r}".format(url)) + * finally: + * PyBuffer_Release(&py_buf) # <<<<<<<<<<<<<< + * PyMem_Free(parsed) + */ + PyBuffer_Release((&__pyx_v_py_buf)); + + /* "httptools/parser/url_parser.pyx":108 + * finally: + * PyBuffer_Release(&py_buf) + * PyMem_Free(parsed) # <<<<<<<<<<<<<< + */ + PyMem_Free(__pyx_v_parsed); + __pyx_r = __pyx_t_16; + __pyx_t_16 = 0; + goto __pyx_L0; + } + } + + /* "httptools/parser/url_parser.pyx":43 + * + * + * def parse_url(url): # <<<<<<<<<<<<<< + * cdef: + * Py_buffer py_buf + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("httptools.parser.url_parser.parse_url", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_schema); + __Pyx_XDECREF(__pyx_v_host); + __Pyx_XDECREF(__pyx_v_port); + __Pyx_XDECREF(__pyx_v_path); + __Pyx_XDECREF(__pyx_v_query); + __Pyx_XDECREF(__pyx_v_fragment); + __Pyx_XDECREF(__pyx_v_userinfo); + 
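+  /* Note: both exit paths converge here: __pyx_L1_error falls through into __pyx_L0, so every object-typed local of parse_url() is released with the NULL-safe XDECREF on success and on error alike; py_buf and parsed were already released in the finally-block translation above. */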
__Pyx_XDECREF(__pyx_v_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_freelist_9httptools_6parser_10url_parser_URL[250]; +static int __pyx_freecount_9httptools_6parser_10url_parser_URL = 0; + +static PyObject *__pyx_tp_new_9httptools_6parser_10url_parser_URL(PyTypeObject *t, PyObject *a, PyObject *k) { + struct __pyx_obj_9httptools_6parser_10url_parser_URL *p; + PyObject *o; + if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_9httptools_6parser_10url_parser_URL > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_9httptools_6parser_10url_parser_URL)) & ((t->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)) == 0))) { + o = (PyObject*)__pyx_freelist_9httptools_6parser_10url_parser_URL[--__pyx_freecount_9httptools_6parser_10url_parser_URL]; + memset(o, 0, sizeof(struct __pyx_obj_9httptools_6parser_10url_parser_URL)); + (void) PyObject_INIT(o, t); + PyObject_GC_Track(o); + } else { + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + } + p = ((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)o); + p->schema = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->host = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->port = Py_None; Py_INCREF(Py_None); + p->path = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->query = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->fragment = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->userinfo = ((PyObject*)Py_None); Py_INCREF(Py_None); + if (unlikely(__pyx_pw_9httptools_6parser_10url_parser_3URL_1__cinit__(o, a, k) < 0)) goto bad; + return o; + bad: + Py_DECREF(o); o = 0; + return NULL; +} + +static void __pyx_tp_dealloc_9httptools_6parser_10url_parser_URL(PyObject *o) { + struct __pyx_obj_9httptools_6parser_10url_parser_URL *p = (struct __pyx_obj_9httptools_6parser_10url_parser_URL *)o; + #if CYTHON_USE_TP_FINALIZE + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->schema); + Py_CLEAR(p->host); + Py_CLEAR(p->port); + Py_CLEAR(p->path); + Py_CLEAR(p->query); + Py_CLEAR(p->fragment); + Py_CLEAR(p->userinfo); + if (CYTHON_COMPILING_IN_CPYTHON && ((__pyx_freecount_9httptools_6parser_10url_parser_URL < 250) & (Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_9httptools_6parser_10url_parser_URL)) & ((Py_TYPE(o)->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)) == 0))) { + __pyx_freelist_9httptools_6parser_10url_parser_URL[__pyx_freecount_9httptools_6parser_10url_parser_URL++] = ((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)o); + } else { + (*Py_TYPE(o)->tp_free)(o); + } +} + +static int __pyx_tp_traverse_9httptools_6parser_10url_parser_URL(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_9httptools_6parser_10url_parser_URL *p = (struct __pyx_obj_9httptools_6parser_10url_parser_URL *)o; + if (p->port) { + e = (*v)(p->port, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_9httptools_6parser_10url_parser_URL(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_9httptools_6parser_10url_parser_URL *p = (struct __pyx_obj_9httptools_6parser_10url_parser_URL *)o; + tmp = ((PyObject*)p->port); + p->port = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; 
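+  /* Note: tp_traverse/tp_clear above only visit the 'port' slot, the one attribute declared as a generic object; the bytes-typed attributes can never participate in a reference cycle, so Cython leaves them out of the GC hooks. */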
+} + +static PyObject *__pyx_getprop_9httptools_6parser_10url_parser_3URL_schema(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_9httptools_6parser_10url_parser_3URL_6schema_1__get__(o); +} + +static PyObject *__pyx_getprop_9httptools_6parser_10url_parser_3URL_host(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_9httptools_6parser_10url_parser_3URL_4host_1__get__(o); +} + +static PyObject *__pyx_getprop_9httptools_6parser_10url_parser_3URL_port(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_9httptools_6parser_10url_parser_3URL_4port_1__get__(o); +} + +static PyObject *__pyx_getprop_9httptools_6parser_10url_parser_3URL_path(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_9httptools_6parser_10url_parser_3URL_4path_1__get__(o); +} + +static PyObject *__pyx_getprop_9httptools_6parser_10url_parser_3URL_query(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_9httptools_6parser_10url_parser_3URL_5query_1__get__(o); +} + +static PyObject *__pyx_getprop_9httptools_6parser_10url_parser_3URL_fragment(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_9httptools_6parser_10url_parser_3URL_8fragment_1__get__(o); +} + +static PyObject *__pyx_getprop_9httptools_6parser_10url_parser_3URL_userinfo(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_9httptools_6parser_10url_parser_3URL_8userinfo_1__get__(o); +} + +static PyMethodDef __pyx_methods_9httptools_6parser_10url_parser_URL[] = { + {"__reduce_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_10url_parser_3URL_5__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_10url_parser_3URL_7__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_9httptools_6parser_10url_parser_URL[] = { + {(char *)"schema", __pyx_getprop_9httptools_6parser_10url_parser_3URL_schema, 0, (char *)0, 0}, + {(char *)"host", __pyx_getprop_9httptools_6parser_10url_parser_3URL_host, 0, (char *)0, 0}, + {(char *)"port", __pyx_getprop_9httptools_6parser_10url_parser_3URL_port, 0, (char *)0, 0}, + {(char *)"path", __pyx_getprop_9httptools_6parser_10url_parser_3URL_path, 0, (char *)0, 0}, + {(char *)"query", __pyx_getprop_9httptools_6parser_10url_parser_3URL_query, 0, (char *)0, 0}, + {(char *)"fragment", __pyx_getprop_9httptools_6parser_10url_parser_3URL_fragment, 0, (char *)0, 0}, + {(char *)"userinfo", __pyx_getprop_9httptools_6parser_10url_parser_3URL_userinfo, 0, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_9httptools_6parser_10url_parser_URL = { + PyVarObject_HEAD_INIT(0, 0) + "httptools.parser.url_parser.URL", /*tp_name*/ + sizeof(struct __pyx_obj_9httptools_6parser_10url_parser_URL), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_9httptools_6parser_10url_parser_URL, /*tp_dealloc*/ + #if PY_VERSION_HEX < 0x030800b4 + 0, /*tp_print*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 + 0, /*tp_vectorcall_offset*/ + #endif + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + __pyx_pw_9httptools_6parser_10url_parser_3URL_3__repr__, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + 
__pyx_tp_traverse_9httptools_6parser_10url_parser_URL, /*tp_traverse*/ + __pyx_tp_clear_9httptools_6parser_10url_parser_URL, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_9httptools_6parser_10url_parser_URL, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_9httptools_6parser_10url_parser_URL, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_9httptools_6parser_10url_parser_URL, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif + #if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800) + 0, /*tp_vectorcall*/ + #endif + #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 + 0, /*tp_print*/ + #endif + #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000 + 0, /*tp_pypy_flags*/ + #endif +}; + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +#if CYTHON_PEP489_MULTI_PHASE_INIT +static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ +static int __pyx_pymod_exec_url_parser(PyObject* module); /*proto*/ +static PyModuleDef_Slot __pyx_moduledef_slots[] = { + {Py_mod_create, (void*)__pyx_pymod_create}, + {Py_mod_exec, (void*)__pyx_pymod_exec_url_parser}, + {0, NULL} +}; +#endif + +static struct PyModuleDef __pyx_moduledef = { + PyModuleDef_HEAD_INIT, + "url_parser", + 0, /* m_doc */ + #if CYTHON_PEP489_MULTI_PHASE_INIT + 0, /* m_size */ + #else + -1, /* m_size */ + #endif + __pyx_methods /* m_methods */, + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_moduledef_slots, /* m_slots */ + #else + NULL, /* m_reload */ + #endif + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif +#ifndef CYTHON_SMALL_CODE +#if defined(__clang__) + #define CYTHON_SMALL_CODE +#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) + #define CYTHON_SMALL_CODE __attribute__((cold)) +#else + #define CYTHON_SMALL_CODE +#endif +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_n_s_HttpParserInvalidURLError, __pyx_k_HttpParserInvalidURLError, sizeof(__pyx_k_HttpParserInvalidURLError), 0, 0, 1, 1}, + {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, + {&__pyx_n_s_URL, __pyx_k_URL, sizeof(__pyx_k_URL), 0, 0, 1, 1}, + {&__pyx_kp_u_URL_schema_r_host_r_port_r_path, __pyx_k_URL_schema_r_host_r_port_r_path, sizeof(__pyx_k_URL_schema_r_host_r_port_r_path), 0, 1, 0, 0}, + {&__pyx_n_s_all, __pyx_k_all, sizeof(__pyx_k_all), 0, 0, 1, 1}, + {&__pyx_n_s_buf_data, __pyx_k_buf_data, sizeof(__pyx_k_buf_data), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_errors, __pyx_k_errors, sizeof(__pyx_k_errors), 0, 0, 1, 1}, + {&__pyx_n_s_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 0, 1, 1}, + {&__pyx_n_s_fragment, __pyx_k_fragment, sizeof(__pyx_k_fragment), 0, 0, 1, 1}, + {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, + {&__pyx_n_s_host, __pyx_k_host, sizeof(__pyx_k_host), 0, 0, 1, 1}, + {&__pyx_n_s_httptools_parser_url_parser, __pyx_k_httptools_parser_url_parser, sizeof(__pyx_k_httptools_parser_url_parser), 0, 0, 1, 1}, + 
{&__pyx_kp_s_httptools_parser_url_parser_pyx, __pyx_k_httptools_parser_url_parser_pyx, sizeof(__pyx_k_httptools_parser_url_parser_pyx), 0, 0, 1, 0}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_kp_u_invalid_url_r, __pyx_k_invalid_url_r, sizeof(__pyx_k_invalid_url_r), 0, 1, 0, 0}, + {&__pyx_n_s_ln, __pyx_k_ln, sizeof(__pyx_k_ln), 0, 0, 1, 1}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, + {&__pyx_kp_s_no_default___reduce___due_to_non, __pyx_k_no_default___reduce___due_to_non, sizeof(__pyx_k_no_default___reduce___due_to_non), 0, 0, 1, 0}, + {&__pyx_n_s_off, __pyx_k_off, sizeof(__pyx_k_off), 0, 0, 1, 1}, + {&__pyx_n_s_parse_url, __pyx_k_parse_url, sizeof(__pyx_k_parse_url), 0, 0, 1, 1}, + {&__pyx_n_u_parse_url, __pyx_k_parse_url, sizeof(__pyx_k_parse_url), 0, 1, 0, 1}, + {&__pyx_n_s_parsed, __pyx_k_parsed, sizeof(__pyx_k_parsed), 0, 0, 1, 1}, + {&__pyx_n_s_path, __pyx_k_path, sizeof(__pyx_k_path), 0, 0, 1, 1}, + {&__pyx_n_s_port, __pyx_k_port, sizeof(__pyx_k_port), 0, 0, 1, 1}, + {&__pyx_n_s_py_buf, __pyx_k_py_buf, sizeof(__pyx_k_py_buf), 0, 0, 1, 1}, + {&__pyx_n_s_query, __pyx_k_query, sizeof(__pyx_k_query), 0, 0, 1, 1}, + {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, + {&__pyx_n_s_res, __pyx_k_res, sizeof(__pyx_k_res), 0, 0, 1, 1}, + {&__pyx_n_s_result, __pyx_k_result, sizeof(__pyx_k_result), 0, 0, 1, 1}, + {&__pyx_n_s_schema, __pyx_k_schema, sizeof(__pyx_k_schema), 0, 0, 1, 1}, + {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, + {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_url, __pyx_k_url, sizeof(__pyx_k_url), 0, 0, 1, 1}, + {&__pyx_n_s_userinfo, __pyx_k_userinfo, sizeof(__pyx_k_userinfo), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 2, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "(tree fragment)":2 + * def __reduce_cython__(self): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + */ + __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple_); + __Pyx_GIVEREF(__pyx_tuple_); + + /* "(tree fragment)":4 + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") + * def __setstate_cython__(self, __pyx_state): + * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< + */ + __pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__2); + __Pyx_GIVEREF(__pyx_tuple__2); + + /* "httptools/parser/url_parser.pyx":13 + * from 
. cimport url_cparser as uparser + * + * __all__ = ('parse_url',) # <<<<<<<<<<<<<< + * + * @cython.freelist(250) + */ + __pyx_tuple__3 = PyTuple_Pack(1, __pyx_n_u_parse_url); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__3); + __Pyx_GIVEREF(__pyx_tuple__3); + + /* "httptools/parser/url_parser.pyx":43 + * + * + * def parse_url(url): # <<<<<<<<<<<<<< + * cdef: + * Py_buffer py_buf + */ + __pyx_tuple__4 = PyTuple_Pack(15, __pyx_n_s_url, __pyx_n_s_py_buf, __pyx_n_s_buf_data, __pyx_n_s_parsed, __pyx_n_s_res, __pyx_n_s_schema, __pyx_n_s_host, __pyx_n_s_port, __pyx_n_s_path, __pyx_n_s_query, __pyx_n_s_fragment, __pyx_n_s_userinfo, __pyx_n_s_result, __pyx_n_s_off, __pyx_n_s_ln); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(1, 43, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__4); + __Pyx_GIVEREF(__pyx_tuple__4); + __pyx_codeobj__5 = (PyObject*)__Pyx_PyCode_New(1, 0, 15, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__4, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_httptools_parser_url_parser_pyx, __pyx_n_s_parse_url, 43, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__5)) __PYX_ERR(1, 43, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(1, 1, __pyx_L1_error); + return 0; + __pyx_L1_error:; + return -1; +} + +static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ +static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ + +static int __Pyx_modinit_global_init_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); + /*--- Global init code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_variable_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); + /*--- Variable export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_export_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); + /*--- Function export code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_type_init_code(void) { + __Pyx_RefNannyDeclarations + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); + /*--- Type init code ---*/ + if (PyType_Ready(&__pyx_type_9httptools_6parser_10url_parser_URL) < 0) __PYX_ERR(1, 16, __pyx_L1_error) + #if PY_VERSION_HEX < 0x030800B1 + __pyx_type_9httptools_6parser_10url_parser_URL.tp_print = 0; + #endif + if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_9httptools_6parser_10url_parser_URL.tp_dictoffset && __pyx_type_9httptools_6parser_10url_parser_URL.tp_getattro == PyObject_GenericGetAttr)) { + __pyx_type_9httptools_6parser_10url_parser_URL.tp_getattro = 
__Pyx_PyObject_GenericGetAttr; + } + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_URL, (PyObject *)&__pyx_type_9httptools_6parser_10url_parser_URL) < 0) __PYX_ERR(1, 16, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_9httptools_6parser_10url_parser_URL) < 0) __PYX_ERR(1, 16, __pyx_L1_error) + __pyx_ptype_9httptools_6parser_10url_parser_URL = &__pyx_type_9httptools_6parser_10url_parser_URL; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_type_import_code(void) { + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); + /*--- Type import code ---*/ + __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type", + #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000 + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(2, 9, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_7cpython_4bool_bool = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "bool", sizeof(PyBoolObject), __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_7cpython_4bool_bool) __PYX_ERR(3, 8, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(4, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_ptype_7cpython_7complex_complex = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "complex", sizeof(PyComplexObject), __Pyx_ImportType_CheckSize_Warn); + if (!__pyx_ptype_7cpython_7complex_complex) __PYX_ERR(4, 15, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_modinit_variable_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); + /*--- Variable import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + +static int __Pyx_modinit_function_import_code(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); + /*--- Function import code ---*/ + __Pyx_RefNannyFinishContext(); + return 0; +} + + +#ifndef CYTHON_NO_PYINIT_EXPORT +#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC +#elif PY_MAJOR_VERSION < 3 +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" void +#else +#define __Pyx_PyMODINIT_FUNC void +#endif +#else +#ifdef __cplusplus +#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * +#else +#define __Pyx_PyMODINIT_FUNC PyObject * +#endif +#endif + + +#if PY_MAJOR_VERSION < 3 +__Pyx_PyMODINIT_FUNC initurl_parser(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC initurl_parser(void) +#else +__Pyx_PyMODINIT_FUNC PyInit_url_parser(void) CYTHON_SMALL_CODE; /*proto*/ +__Pyx_PyMODINIT_FUNC PyInit_url_parser(void) +#if CYTHON_PEP489_MULTI_PHASE_INIT +{ + return PyModuleDef_Init(&__pyx_moduledef); +} 
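+/* Illustrative note (added commentary, not part of the upstream httptools
+   sources): under PEP 489 multi-phase init, PyInit_url_parser() above only
+   returns the module definition; CPython then calls __pyx_pymod_create() and
+   __pyx_pymod_exec_url_parser() below to create and populate the module. A
+   minimal usage sketch of the module this diff vendors -- the example URL is
+   assumed, everything else is taken from the code in this file:
+
+       from httptools.parser.url_parser import parse_url
+
+       u = parse_url(b"https://user@example.com:8080/path?q=1#frag")
+       # URL exposes the read-only properties registered in __pyx_getsets above:
+       print(u.schema, u.host, u.port, u.path, u.query, u.fragment, u.userinfo)
+*/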
+static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { + #if PY_VERSION_HEX >= 0x030700A1 + static PY_INT64_T main_interpreter_id = -1; + PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); + if (main_interpreter_id == -1) { + main_interpreter_id = current_id; + return (unlikely(current_id == -1)) ? -1 : 0; + } else if (unlikely(main_interpreter_id != current_id)) + #else + static PyInterpreterState *main_interpreter = NULL; + PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; + if (!main_interpreter) { + main_interpreter = current_interpreter; + } else if (unlikely(main_interpreter != current_interpreter)) + #endif + { + PyErr_SetString( + PyExc_ImportError, + "Interpreter change detected - this module can only be loaded into one interpreter per process."); + return -1; + } + return 0; +} +static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { + PyObject *value = PyObject_GetAttrString(spec, from_name); + int result = 0; + if (likely(value)) { + if (allow_none || value != Py_None) { + result = PyDict_SetItemString(moddict, to_name, value); + } + Py_DECREF(value); + } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Clear(); + } else { + result = -1; + } + return result; +} +static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { + PyObject *module = NULL, *moddict, *modname; + if (__Pyx_check_single_interpreter()) + return NULL; + if (__pyx_m) + return __Pyx_NewRef(__pyx_m); + modname = PyObject_GetAttrString(spec, "name"); + if (unlikely(!modname)) goto bad; + module = PyModule_NewObject(modname); + Py_DECREF(modname); + if (unlikely(!module)) goto bad; + moddict = PyModule_GetDict(module); + if (unlikely(!moddict)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; + if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; + return module; +bad: + Py_XDECREF(module); + return NULL; +} + + +static CYTHON_SMALL_CODE int __pyx_pymod_exec_url_parser(PyObject *__pyx_pyinit_module) +#endif +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_lineno = 0; + const char *__pyx_filename = NULL; + int __pyx_clineno = 0; + __Pyx_RefNannyDeclarations + #if CYTHON_PEP489_MULTI_PHASE_INIT + if (__pyx_m) { + if (__pyx_m == __pyx_pyinit_module) return 0; + PyErr_SetString(PyExc_RuntimeError, "Module 'url_parser' has already been imported. 
Re-initialisation is not supported."); + return -1; + } + #elif PY_MAJOR_VERSION >= 3 + if (__pyx_m) return __Pyx_NewRef(__pyx_m); + #endif + #if CYTHON_REFNANNY +__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); +if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); +} +#endif + __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_url_parser(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #ifdef __Pxy_PyFrame_Initialize_Offsets + __Pxy_PyFrame_Initialize_Offsets(); + #endif + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(1, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_AsyncGen_USED + if (__pyx_AsyncGen_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + PyEval_InitThreads(); + #endif + /*--- Module creation code ---*/ + #if CYTHON_PEP489_MULTI_PHASE_INIT + __pyx_m = __pyx_pyinit_module; + Py_INCREF(__pyx_m); + #else + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("url_parser", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(1, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(1, 1, __pyx_L1_error) + Py_INCREF(__pyx_b); + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(1, 1, __pyx_L1_error) + Py_INCREF(__pyx_cython_runtime); + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(1, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main_httptools__parser__url_parser) { + if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(1, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "httptools.parser.url_parser")) { + if (unlikely(PyDict_SetItemString(modules, "httptools.parser.url_parser", __pyx_m) < 0)) __PYX_ERR(1, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + /*--- Global type/function init code ---*/ + (void)__Pyx_modinit_global_init_code(); + (void)__Pyx_modinit_variable_export_code(); + (void)__Pyx_modinit_function_export_code(); + if (unlikely(__Pyx_modinit_type_init_code() < 0)) __PYX_ERR(1, 1, __pyx_L1_error) + if (unlikely(__Pyx_modinit_type_import_code() < 0)) __PYX_ERR(1, 1, __pyx_L1_error) + (void)__Pyx_modinit_variable_import_code(); + (void)__Pyx_modinit_function_import_code(); + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(1, 1, __pyx_L1_error) + #endif + + /* "httptools/parser/url_parser.pyx":8 + * Py_buffer + * + * from .errors import HttpParserInvalidURLError # <<<<<<<<<<<<<< + * + * cimport cython + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_HttpParserInvalidURLError); + __Pyx_GIVEREF(__pyx_n_s_HttpParserInvalidURLError); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_HttpParserInvalidURLError); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_errors, __pyx_t_1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpParserInvalidURLError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpParserInvalidURLError, __pyx_t_1) < 0) __PYX_ERR(1, 8, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "httptools/parser/url_parser.pyx":13 + * from . 
cimport url_cparser as uparser + * + * __all__ = ('parse_url',) # <<<<<<<<<<<<<< + * + * @cython.freelist(250) + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_all, __pyx_tuple__3) < 0) __PYX_ERR(1, 13, __pyx_L1_error) + + /* "httptools/parser/url_parser.pyx":43 + * + * + * def parse_url(url): # <<<<<<<<<<<<<< + * cdef: + * Py_buffer py_buf + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_9httptools_6parser_10url_parser_1parse_url, NULL, __pyx_n_s_httptools_parser_url_parser); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 43, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_parse_url, __pyx_t_2) < 0) __PYX_ERR(1, 43, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "httptools/parser/url_parser.pyx":1 + * #cython: language_level=3 # <<<<<<<<<<<<<< + * + * from __future__ import print_function + */ + __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init httptools.parser.url_parser", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_CLEAR(__pyx_m); + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init httptools.parser.url_parser"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if CYTHON_PEP489_MULTI_PHASE_INIT + return (__pyx_m != NULL) ? 0 : -1; + #elif PY_MAJOR_VERSION >= 3 + return __pyx_m; + #else + return; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule(modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, "RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* PyObjectGetAttrStr */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* RaiseArgTupleInvalid */ +static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + 
func_name, more_or_less, num_expected, + (num_expected == 1) ? "" : "s", num_found); +} + +/* RaiseDoubleKeywords */ +static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ +static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* ArgTypeTest */ +static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact) +{ + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + else if (exact) { + #if PY_MAJOR_VERSION == 2 + if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; + #endif + } + else { + if (likely(__Pyx_TypeCheck(obj, type))) return 1; + } + PyErr_Format(PyExc_TypeError, + "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)", + name, type->tp_name, Py_TYPE(obj)->tp_name); + return 0; +} + +/* PyFunctionFastCall */ +#if CYTHON_FAST_PYCALL +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = __Pyx_PyFrame_GetLocalsplus(f); + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, (int)nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif +#endif + +/* PyCFunctionFastCall */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int flags = PyCFunction_GET_FLAGS(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); + } else { + return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); + } +} +#endif + +/* PyObjectCall */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = Py_TYPE(func)->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyErrFetchRestore */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, 
PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* RaiseException */ +#if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class 
must be a subclass of BaseException"); + goto bad; + } + if (cause) { + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = __Pyx_PyThreadState_Current; + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* PyDictVersioning */ +#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; +} +static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { + PyObject **dictptr = NULL; + Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; + if (offset) { +#if CYTHON_COMPILING_IN_CPYTHON + dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); +#else + dictptr = _PyObject_GetDictPtr(obj); +#endif + } + return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; +} +static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { + PyObject *dict = Py_TYPE(obj)->tp_dict; + if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) + return 0; + return obj_dict_version == __Pyx_get_object_dict_version(obj); +} +#endif + +/* GetModuleGlobalName */ +#if CYTHON_USE_DICT_VERSIONS +static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) +#else +static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) +#endif +{ + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS +#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 + result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } else if (unlikely(PyErr_Occurred())) { + return NULL; + } +#else + result = PyDict_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } +#endif +#else + result = PyObject_GetItem(__pyx_d, name); + __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) + if (likely(result)) { + return __Pyx_NewRef(result); + } + PyErr_Clear(); +#endif + return __Pyx_GetBuiltinName(name); +} + +/* PyObjectCall2Args */ +static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { + PyObject *args, *result = NULL; + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyFunction_FastCall(function, args, 
2); + } + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(function)) { + PyObject *args[2] = {arg1, arg2}; + return __Pyx_PyCFunction_FastCall(function, args, 2); + } + #endif + args = PyTuple_New(2); + if (unlikely(!args)) goto done; + Py_INCREF(arg1); + PyTuple_SET_ITEM(args, 0, arg1); + Py_INCREF(arg2); + PyTuple_SET_ITEM(args, 1, arg2); + Py_INCREF(function); + result = __Pyx_PyObject_Call(function, args, NULL); + Py_DECREF(args); + Py_DECREF(function); +done: + return result; +} + +/* PyObjectCallMethO */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallOneArg */ +#if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (__Pyx_PyFastCFunction_Check(func)) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +#endif + +/* GetException */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) +#endif +{ + PyObject *local_type, *local_value, *local_tb; +#if CYTHON_FAST_THREAD_STATE + PyObject *tmp_type, *tmp_value, *tmp_tb; + local_type = tstate->curexc_type; + local_value = tstate->curexc_value; + local_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(&local_type, &local_value, &local_tb); +#endif + PyErr_NormalizeException(&local_type, &local_value, &local_tb); +#if CYTHON_FAST_THREAD_STATE + if (unlikely(tstate->curexc_type)) +#else + if (unlikely(PyErr_Occurred())) +#endif + goto bad; + #if PY_MAJOR_VERSION >= 3 + if (local_tb) { + if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) + goto bad; + } + #endif + Py_XINCREF(local_tb); + Py_XINCREF(local_type); + Py_XINCREF(local_value); + *type = local_type; + *value = local_value; + *tb = local_tb; +#if CYTHON_FAST_THREAD_STATE + #if CYTHON_USE_EXC_INFO_STACK + { + _PyErr_StackItem *exc_info = 
tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = local_type; + exc_info->exc_value = local_value; + exc_info->exc_traceback = local_tb; + } + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + *type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/* SwapException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = *type; + exc_info->exc_value = *value; + exc_info->exc_traceback = *tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = *type; + tstate->exc_value = *value; + tstate->exc_traceback = *tb; + #endif + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); + PyErr_SetExcInfo(*type, *value, *tb); + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#endif + +/* GetTopmostException */ +#if CYTHON_USE_EXC_INFO_STACK +static _PyErr_StackItem * +__Pyx_PyErr_GetTopmostException(PyThreadState *tstate) +{ + _PyErr_StackItem *exc_info = tstate->exc_info; + while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) && + exc_info->previous_item != NULL) + { + exc_info = exc_info->previous_item; + } + return exc_info; +} +#endif + +/* SaveResetException */ +#if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate); + *type = exc_info->exc_type; + *value = exc_info->exc_value; + *tb = exc_info->exc_traceback; + #else + *type = tstate->exc_type; + *value = tstate->exc_value; + *tb = tstate->exc_traceback; + #endif + Py_XINCREF(*type); + Py_XINCREF(*value); + Py_XINCREF(*tb); +} +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + #if CYTHON_USE_EXC_INFO_STACK + _PyErr_StackItem *exc_info = tstate->exc_info; + tmp_type = exc_info->exc_type; + tmp_value = exc_info->exc_value; + tmp_tb = exc_info->exc_traceback; + exc_info->exc_type = type; + exc_info->exc_value = value; + exc_info->exc_traceback = tb; + #else + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = type; + tstate->exc_value = value; + tstate->exc_traceback = tb; + #endif + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +#endif + +/* PyObject_GenericGetAttrNoDict */ +#if CYTHON_USE_TYPE_SLOTS && 
CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { + PyErr_Format(PyExc_AttributeError, +#if PY_MAJOR_VERSION >= 3 + "'%.50s' object has no attribute '%U'", + tp->tp_name, attr_name); +#else + "'%.50s' object has no attribute '%.400s'", + tp->tp_name, PyString_AS_STRING(attr_name)); +#endif + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { + PyObject *descr; + PyTypeObject *tp = Py_TYPE(obj); + if (unlikely(!PyString_Check(attr_name))) { + return PyObject_GenericGetAttr(obj, attr_name); + } + assert(!tp->tp_dictoffset); + descr = _PyType_Lookup(tp, attr_name); + if (unlikely(!descr)) { + return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); + } + Py_INCREF(descr); + #if PY_MAJOR_VERSION < 3 + if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) + #endif + { + descrgetfunc f = Py_TYPE(descr)->tp_descr_get; + if (unlikely(f)) { + PyObject *res = f(descr, obj, (PyObject *)tp); + Py_DECREF(descr); + return res; + } + } + return descr; +} +#endif + +/* PyObject_GenericGetAttr */ +#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 +static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { + if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { + return PyObject_GenericGetAttr(obj, attr_name); + } + return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); +} +#endif + +/* PyErrExceptionMatches */ +#if CYTHON_FAST_THREAD_STATE +static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i<n; i++) { + if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1; + } +#endif + for (i=0; i<n; i++) { + if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1; + } + return 0; +} +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) { + PyObject *exc_type = tstate->curexc_type; + if (exc_type == err) return 1; + if (unlikely(!exc_type)) return 0; + if (unlikely(PyTuple_Check(err))) + return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); + return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); +} +#endif + +/* PyObjectGetAttrStrNoError */ +static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) + __Pyx_PyErr_Clear(); +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { + PyObject *result; +#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { + return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); + } +#endif + result = __Pyx_PyObject_GetAttrStr(obj, attr_name); + if (unlikely(!result)) { + __Pyx_PyObject_GetAttrStr_ClearAttributeError(); + } + return result; +} + +/* SetupReduce */ +static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { + int ret; + PyObject *name_attr; + name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); + if (likely(name_attr)) { + ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); + } else { + ret = -1; + } + if (unlikely(ret < 0)) { + PyErr_Clear(); + ret = 0; + } + Py_XDECREF(name_attr); + return ret; +} +static int __Pyx_setup_reduce(PyObject* type_obj) { + int ret = 0; + PyObject *object_reduce = NULL; + PyObject *object_reduce_ex = NULL; + PyObject *reduce = NULL; + PyObject *reduce_ex = NULL; + PyObject *reduce_cython = NULL; + PyObject *setstate = NULL; + PyObject *setstate_cython = NULL; +#if CYTHON_USE_PYTYPE_LOOKUP + if 
(_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD; +#else + if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD; +#endif +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#else + object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; +#endif + reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; + if (reduce_ex == object_reduce_ex) { +#if CYTHON_USE_PYTYPE_LOOKUP + object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#else + object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; +#endif + reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; + if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { + reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); + if (likely(reduce_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (reduce == object_reduce || PyErr_Occurred()) { + goto __PYX_BAD; + } + setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); + if (!setstate) PyErr_Clear(); + if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { + setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); + if (likely(setstate_cython)) { + ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; + } else if (!setstate || PyErr_Occurred()) { + goto __PYX_BAD; + } + } + PyType_Modified((PyTypeObject*)type_obj); + } + } + goto __PYX_GOOD; +__PYX_BAD: + if (!PyErr_Occurred()) + PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); + ret = -1; +__PYX_GOOD: +#if !CYTHON_USE_PYTYPE_LOOKUP + Py_XDECREF(object_reduce); + Py_XDECREF(object_reduce_ex); +#endif + Py_XDECREF(reduce); + Py_XDECREF(reduce_ex); + Py_XDECREF(reduce_cython); + Py_XDECREF(setstate); + Py_XDECREF(setstate_cython); + return ret; +} + +/* TypeImport */ +#ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name, + size_t size, enum __Pyx_ImportType_CheckSize check_size) +{ + PyObject *result = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + result = PyObject_GetAttrString(module, class_name); + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + 
Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if ((size_t)basicsize < size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + goto bad; + } + else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. " + "Expected %zd from C header, got %zd from PyObject", + module_name, class_name, size, basicsize); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(result); + return NULL; +} +#endif + +/* Import */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_MAJOR_VERSION < 3 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) { + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_MAJOR_VERSION < 3 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +/* ImportFrom */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* CLineInTraceback */ +#ifndef CYTHON_CLINE_IN_TRACEBACK +static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { + PyObject *use_cline; + PyObject *ptype, *pvalue, *ptraceback; +#if CYTHON_COMPILING_IN_CPYTHON + PyObject **cython_runtime_dict; +#endif + if (unlikely(!__pyx_cython_runtime)) { + return c_line; + } + __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); +#if CYTHON_COMPILING_IN_CPYTHON + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if 
(likely(cython_runtime_dict)) { + __PYX_PY_DICT_LOOKUP_IF_MODIFIED( + use_cline, *cython_runtime_dict, + __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) + } else +#endif + { + PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + PyErr_Clear(); + use_cline = NULL; + } + } + if (!use_cline) { + c_line = 0; + (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { + c_line = 0; + } + __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); + return c_line; +} +#endif + +/* CodeObjectCache */ +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ +#include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const 
char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = NULL; + PyObject *py_funcname = NULL; + #if PY_MAJOR_VERSION < 3 + PyObject *py_srcfile = NULL; + py_srcfile = PyString_FromString(filename); + if (!py_srcfile) goto bad; + #endif + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + funcname = PyUnicode_AsUTF8(py_funcname); + if (!funcname) goto bad; + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + if (!py_funcname) goto bad; + #endif + } + #if PY_MAJOR_VERSION < 3 + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + #else + py_code = PyCode_NewEmpty(filename, funcname, py_line); + #endif + Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline + return py_code; +bad: + Py_XDECREF(py_funcname); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_srcfile); + #endif + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + PyThreadState *tstate = __Pyx_PyThreadState_Current; + if (c_line) { + c_line = __Pyx_CLineForTraceback(tstate, c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); + } + py_frame = PyFrame_New( + tstate, /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint16_t(uint16_t value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const uint16_t neg_one = (uint16_t) -1, const_zero = (uint16_t) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(uint16_t) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(uint16_t) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(uint16_t) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(uint16_t) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(uint16_t) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(uint16_t), + little, !is_unsigned); + } +} + +/* CIntToPy */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* CIntFromPyVerify */ +#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } 
+ +/* CIntFromPy */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const long neg_one = (long) -1, const_zero = (long) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) 
(((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + 
PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CIntFromPy */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wconversion" +#endif + const int neg_one = (int) -1, const_zero = (int) 0; +#ifdef __Pyx_HAS_GCC_DIAGNOSTIC +#pragma GCC diagnostic pop +#endif + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, 
+digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char 
*)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* FastTypeChecks */ +#if CYTHON_COMPILING_IN_CPYTHON +static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { + while (a) { + a = a->tp_base; + if (a == b) + return 1; + } + return b == &PyBaseObject_Type; +} +static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { + PyObject *mro; + if (a == b) return 1; + mro = a->tp_mro; + if (likely(mro)) { + Py_ssize_t i, n; + n = PyTuple_GET_SIZE(mro); + for (i = 0; i < n; i++) { + if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) + return 1; + } + return 0; + } + return __Pyx_InBases(a, b); +} +#if PY_MAJOR_VERSION == 2 +static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { + PyObject *exception, *value, *tb; + int res; + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&exception, &value, &tb); + res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + if (!res) { + res = PyObject_IsSubclass(err, exc_type2); + if (unlikely(res == -1)) { + PyErr_WriteUnraisable(err); + res = 0; + } + } + __Pyx_ErrRestore(exception, value, tb); + return res; +} +#else +static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { + int res = exc_type1 ? 
__Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; + if (!res) { + res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); + } + return res; +} +#endif +static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { + Py_ssize_t i, n; + assert(PyExceptionClass_Check(exc_type)); + n = PyTuple_GET_SIZE(tuple); +#if PY_MAJOR_VERSION >= 3 + for (i=0; i<n; i++) { + if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1; + } +#endif + for (i=0; i<n; i++) { + PyObject *t = PyTuple_GET_ITEM(tuple, i); + #if PY_MAJOR_VERSION < 3 + if (likely(exc_type == t)) return 1; + #endif + if (likely(PyExceptionClass_Check(t))) { + if (__Pyx_inner_PyErr_GivenExceptionMatches2(exc_type, NULL, t)) return 1; + } else { + } + } + return 0; +} +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) { + if (likely(err == exc_type)) return 1; + if (likely(PyExceptionClass_Check(err))) { + if (likely(PyExceptionClass_Check(exc_type))) { + return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type); + } else if (likely(PyTuple_Check(exc_type))) { + return __Pyx_PyErr_GivenExceptionMatchesTuple(err, exc_type); + } else { + } + } + return PyErr_GivenExceptionMatches(err, exc_type); +} +static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) { + assert(PyExceptionClass_Check(exc_type1)); + assert(PyExceptionClass_Check(exc_type2)); + if (likely(err == exc_type1 || err == exc_type2)) return 1; + if (likely(PyExceptionClass_Check(err))) { + return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2); + } + return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2)); +} +#endif + +/* CheckBinaryVersion */ +static int __Pyx_check_binary_version(void) { + char ctversion[4], rtversion[4]; + PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); + PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); + if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + return PyErr_WarnEx(NULL, message, 1); + } + return 0; +} + +/* InitStrings */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +#if !CYTHON_PEP393_ENABLED +static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +} +#else +static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { + if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (likely(PyUnicode_IS_ASCII(o))) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +} +#endif +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { + return __Pyx_PyUnicode_AsStringAndSize(o, length); + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE int
__Pyx_PyObject_IsTrueAndDecref(PyObject* x) { + int retval; + if (unlikely(!x)) return -1; + retval = __Pyx_PyObject_IsTrue(x); + Py_DECREF(x); + return retval; +} +static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { +#if PY_MAJOR_VERSION >= 3 + if (PyLong_Check(result)) { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "__int__ returned non-int (type %.200s). " + "The ability to return an instance of a strict subclass of int " + "is deprecated, and may be removed in a future version of Python.", + Py_TYPE(result)->tp_name)) { + Py_DECREF(result); + return NULL; + } + return result; + } +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + type_name, type_name, Py_TYPE(result)->tp_name); + Py_DECREF(result); + return NULL; +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x) || PyLong_Check(x))) +#else + if (likely(PyLong_Check(x))) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = m->nb_int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = m->nb_long(x); + } + #else + if (likely(m && m->nb_int)) { + name = "int"; + res = m->nb_int(x); + } + #endif +#else + if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { + res = PyNumber_Int(x); + } +#endif + if (likely(res)) { +#if PY_MAJOR_VERSION < 3 + if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { +#else + if (unlikely(!PyLong_CheckExact(res))) { +#endif + return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(b); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? 
digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { + if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { + return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); +#if PY_MAJOR_VERSION < 3 + } else if (likely(PyInt_CheckExact(o))) { + return PyInt_AS_LONG(o); +#endif + } else { + Py_ssize_t ival; + PyObject *x; + x = PyNumber_Index(o); + if (!x) return -1; + ival = PyInt_AsLong(x); + Py_DECREF(x); + return ival; + } +} +static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { + return b ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/url_parser.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/url_parser.cp37-win_amd64.pyd new file mode 100644 index 00000000..ba98dd72 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/httptools/parser/url_parser.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/LICENSE b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/LICENSE new file mode 100644 index 00000000..411ce482 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017, 2018, 2019, 2020, 2021 Samuel Colvin and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
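The `__Pyx_PyInt_As_int` / `__Pyx_PyInt_As_long` helpers in the Cython-generated module above implement overflow-checked conversion from arbitrary-precision Python ints to fixed-width C integers. As a minimal Python sketch of the observable behaviour, assuming a 32-bit C `int` as on this win_amd64 build (the helper name `as_c_int` is invented for illustration, not part of any package here):

```py
import ctypes

def as_c_int(value: int) -> int:
    # Mimic the range check __Pyx_PyInt_As_int performs before handing
    # the value over as a C int; the error text matches the C code above.
    if not -2**31 <= value <= 2**31 - 1:  # assumption: 32-bit C int
        raise OverflowError("value too large to convert to int")
    return ctypes.c_int(value).value

print(as_c_int(123))  #> 123
try:
    as_c_int(2**40)
except OverflowError as exc:
    print(exc)  #> value too large to convert to int
```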
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/METADATA new file mode 100644 index 00000000..9036ed80 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/METADATA @@ -0,0 +1,1221 @@ +Metadata-Version: 2.1 +Name: pydantic +Version: 1.10.1 +Summary: Data validation and settings management using python type hints +Home-page: https://github.com/pydantic/pydantic +Author: Samuel Colvin +Author-email: s@muelcolvin.com +License: MIT +Classifier: Development Status :: 5 - Production/Stable +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: Unix +Classifier: Operating System :: POSIX :: Linux +Classifier: Environment :: Console +Classifier: Environment :: MacOS X +Classifier: Framework :: Hypothesis +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Internet +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: typing-extensions (>=4.1.0) +Provides-Extra: dotenv +Requires-Dist: python-dotenv (>=0.10.4) ; extra == 'dotenv' +Provides-Extra: email +Requires-Dist: email-validator (>=1.0.3) ; extra == 'email' + +# pydantic + +[![CI](https://github.com/pydantic/pydantic/workflows/CI/badge.svg?event=push)](https://github.com/pydantic/pydantic/actions?query=event%3Apush+branch%3Amain+workflow%3ACI) +[![Coverage](https://coverage-badge.samuelcolvin.workers.dev/pydantic/pydantic.svg)](https://coverage-badge.samuelcolvin.workers.dev/redirect/pydantic/pydantic) +[![pypi](https://img.shields.io/pypi/v/pydantic.svg)](https://pypi.python.org/pypi/pydantic) +[![CondaForge](https://img.shields.io/conda/v/conda-forge/pydantic.svg)](https://anaconda.org/conda-forge/pydantic) +[![downloads](https://pepy.tech/badge/pydantic/month)](https://pepy.tech/project/pydantic) +[![versions](https://img.shields.io/pypi/pyversions/pydantic.svg)](https://github.com/pydantic/pydantic) +[![license](https://img.shields.io/github/license/pydantic/pydantic.svg)](https://github.com/pydantic/pydantic/blob/main/LICENSE) + +Data validation and settings management using Python type hints. + +Fast and extensible, *pydantic* plays nicely with your linters/IDE/brain. +Define how data should be in pure, canonical Python 3.7+; validate it with *pydantic*. + +## Help + +See [documentation](https://pydantic-docs.helpmanual.io/) for more details. + +## Installation + +Install using `pip install -U pydantic` or `conda install pydantic -c conda-forge`. +For more installation options to make *pydantic* even faster, +see the [Install](https://pydantic-docs.helpmanual.io/install/) section in the documentation. 
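Alongside the quick-start in the next section, a minimal sketch (illustrative, not taken from the pydantic docs) of how a failed validation surfaces as `ValidationError` in pydantic v1:

```py
from pydantic import BaseModel, ValidationError

class Item(BaseModel):
    id: int

try:
    Item(id='not-a-number')  # cannot be coerced to int
except ValidationError as exc:
    print(exc.errors())
    #> [{'loc': ('id',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}]
```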
+ +## A Simple Example + +```py +from datetime import datetime +from typing import List, Optional +from pydantic import BaseModel + +class User(BaseModel): + id: int + name = 'John Doe' + signup_ts: Optional[datetime] = None + friends: List[int] = [] + +external_data = {'id': '123', 'signup_ts': '2017-06-01 12:22', 'friends': [1, '2', b'3']} +user = User(**external_data) +print(user) +#> User id=123 name='John Doe' signup_ts=datetime.datetime(2017, 6, 1, 12, 22) friends=[1, 2, 3] +print(user.id) +#> 123 +``` + +## Contributing + +For guidance on setting up a development environment and how to make a +contribution to *pydantic*, see +[Contributing to Pydantic](https://pydantic-docs.helpmanual.io/contributing/). + +## Reporting a Security Vulnerability + +See our [security policy](https://github.com/pydantic/pydantic/security/policy). + + +## v1.10.1 (2022-08-31) + +* Add `__hash__` method to `pydantic.color.Color` class, [#4454](https://github.com/pydantic/pydantic/issues/4454) by [@czaki](https://github.com/czaki) + +## v1.10.0 (2022-08-30) + +* Refactor the whole _pydantic_ `dataclass` decorator to really act like its standard lib equivalent. + It hence keeps `__eq__`, `__hash__`, ... and makes comparison with its non-validated version possible. + It also fixes usage of `frozen` dataclasses in fields and usage of `default_factory` in nested dataclasses. + Support for `Config.extra` has been added. + Finally, config customization directly via a `dict` is now possible, [#2557](https://github.com/pydantic/pydantic/issues/2557) by [@PrettyWood](https://github.com/PrettyWood)

+ **BREAKING CHANGES:** + - The `compiled` boolean (whether _pydantic_ is compiled with cython) has been moved from `main.py` to `version.py` + - Now that `Config.extra` is supported, `dataclass` ignores extra arguments by default (like `BaseModel`) +* Fix PEP487 `__set_name__` protocol in `BaseModel` for PrivateAttrs, [#4407](https://github.com/pydantic/pydantic/issues/4407) by [@tlambert03](https://github.com/tlambert03) +* Allow for custom parsing of environment variables via `parse_env_var` in `Config`, [#4406](https://github.com/pydantic/pydantic/issues/4406) by [@acmiyaguchi](https://github.com/acmiyaguchi) +* Rename `master` to `main`, [#4405](https://github.com/pydantic/pydantic/issues/4405) by [@hramezani](https://github.com/hramezani) +* Fix `StrictStr` does not raise `ValidationError` when `max_length` is present in `Field`, [#4388](https://github.com/pydantic/pydantic/issues/4388) by [@hramezani](https://github.com/hramezani) +* Make `SecretStr` and `SecretBytes` hashable, [#4387](https://github.com/pydantic/pydantic/issues/4387) by [@chbndrhnns](https://github.com/chbndrhnns) +* Fix `StrictBytes` does not raise `ValidationError` when `max_length` is present in `Field`, [#4380](https://github.com/pydantic/pydantic/issues/4380) by [@JeanArhancet](https://github.com/JeanArhancet) +* Add support for bare `type`, [#4375](https://github.com/pydantic/pydantic/issues/4375) by [@hramezani](https://github.com/hramezani) +* Support Python 3.11, including binaries for 3.11 in PyPI, [#4374](https://github.com/pydantic/pydantic/issues/4374) by [@samuelcolvin](https://github.com/samuelcolvin) +* Add support for `re.Pattern`, [#4366](https://github.com/pydantic/pydantic/issues/4366) by [@hramezani](https://github.com/hramezani) +* Fix `__post_init_post_parse__` is incorrectly passed keyword arguments when no `__post_init__` is defined, [#4361](https://github.com/pydantic/pydantic/issues/4361) by [@hramezani](https://github.com/hramezani) +* Fix implicitly importing `ForwardRef` and `Callable` from `pydantic.typing` instead of `typing` and also expose `MappingIntStrAny`, [#4358](https://github.com/pydantic/pydantic/issues/4358) by [@aminalaee](https://github.com/aminalaee) +* remove `Any` types from the `dataclass` decorator so it can be used with the `disallow_any_expr` mypy option, [#4356](https://github.com/pydantic/pydantic/issues/4356) by [@DetachHead](https://github.com/DetachHead) +* moved repo to `pydantic/pydantic`, [#4348](https://github.com/pydantic/pydantic/issues/4348) by [@yezz123](https://github.com/yezz123) +* fix "extra fields not permitted" error when dataclass with `Extra.forbid` is validated multiple times, [#4343](https://github.com/pydantic/pydantic/issues/4343) by [@detachhead](https://github.com/detachhead) +* Add Python 3.9 and 3.10 examples to docs, [#4339](https://github.com/pydantic/pydantic/issues/4339) by [@Bobronium](https://github.com/Bobronium) +* Discriminated union models now use `oneOf` instead of `anyOf` when generating OpenAPI schema definitions, [#4335](https://github.com/pydantic/pydantic/issues/4335) by [@MaxwellPayne](https://github.com/MaxwellPayne) +* Allow type checkers to infer inner type of `Json` type. `Json[list[str]]` will now be inferred as `list[str]`, + `Json[Any]` should be used instead of plain `Json`.
+ Runtime behaviour is not changed, [#4332](https://github.com/pydantic/pydantic/issues/4332) by [@Bobronium](https://github.com/Bobronium) +* Allow empty string aliases by using an `alias is not None` check, rather than `bool(alias)`, [#4253](https://github.com/pydantic/pydantic/issues/4253) by [@sergeytsaplin](https://github.com/sergeytsaplin) +* Update `ForwardRef`s in `Field.outer_type_`, [#4249](https://github.com/pydantic/pydantic/issues/4249) by [@JacobHayes](https://github.com/JacobHayes) +* The use of `__dataclass_transform__` has been replaced by `typing_extensions.dataclass_transform`, which is the preferred way to mark pydantic models as a dataclass under [PEP 681](https://peps.python.org/pep-0681/), [#4241](https://github.com/pydantic/pydantic/issues/4241) by [@multimeric](https://github.com/multimeric) +* Use parent model's `Config` when validating nested `NamedTuple` fields, [#4219](https://github.com/pydantic/pydantic/issues/4219) by [@synek](https://github.com/synek) +* Update `BaseModel.construct` to work with aliased Fields, [#4192](https://github.com/pydantic/pydantic/issues/4192) by [@kylebamos](https://github.com/kylebamos) +* Catch certain raised errors in `smart_deepcopy` and revert to `deepcopy` if so, [#4184](https://github.com/pydantic/pydantic/issues/4184) by [@coneybeare](https://github.com/coneybeare) +* Add `Config.anystr_upper` and `to_upper` kwarg to constr and conbytes, [#4165](https://github.com/pydantic/pydantic/issues/4165) by [@satheler](https://github.com/satheler) +* Fix JSON schema for `set` and `frozenset` when they include default values, [#4155](https://github.com/pydantic/pydantic/issues/4155) by [@aminalaee](https://github.com/aminalaee) +* Teach the mypy plugin that methods decorated by `@validator` are classmethods, [#4102](https://github.com/pydantic/pydantic/issues/4102) by [@DMRobertson](https://github.com/DMRobertson) +* Improve mypy plugin's ability to detect required fields, [#4086](https://github.com/pydantic/pydantic/issues/4086) by [@richardxia](https://github.com/richardxia) +* Support fields of type `Type[]` in schema, [#4051](https://github.com/pydantic/pydantic/issues/4051) by [@aminalaee](https://github.com/aminalaee) +* Add `default` value in JSON Schema when `const=True`, [#4031](https://github.com/pydantic/pydantic/issues/4031) by [@aminalaee](https://github.com/aminalaee) +* Adds reserved word check to signature generation logic, [#4011](https://github.com/pydantic/pydantic/issues/4011) by [@strue36](https://github.com/strue36) +* Fix Json strategy failure for the complex nested field, [#4005](https://github.com/pydantic/pydantic/issues/4005) by [@sergiosim](https://github.com/sergiosim) +* Add JSON-compatible float constraint `allow_inf_nan`, [#3994](https://github.com/pydantic/pydantic/issues/3994) by [@tiangolo](https://github.com/tiangolo) +* Remove undefined behaviour when `env_prefix` had characters in common with `env_nested_delimiter`, [#3975](https://github.com/pydantic/pydantic/issues/3975) by [@arsenron](https://github.com/arsenron) +* Support generics model with `create_model`, [#3945](https://github.com/pydantic/pydantic/issues/3945) by [@hot123s](https://github.com/hot123s) +* allow submodels to overwrite extra field info, [#3934](https://github.com/pydantic/pydantic/issues/3934) by [@PrettyWood](https://github.com/PrettyWood) +* Document and test structural pattern matching ([PEP 636](https://peps.python.org/pep-0636/)) on `BaseModel`, [#3920](https://github.com/pydantic/pydantic/issues/3920) by
[@irgolic](https://github.com/irgolic) +* Fix incorrect deserialization of python timedelta object to ISO 8601 for negative time deltas. + Minus was serialized in incorrect place ("P-1DT23H59M59.888735S" instead of correct "-P1DT23H59M59.888735S"), [#3899](https://github.com/pydantic/pydantic/issues/3899) by [@07pepa](https://github.com/07pepa) +* Fix validation of discriminated union fields with an alias when passing a model instance, [#3846](https://github.com/pydantic/pydantic/issues/3846) by [@chornsby](https://github.com/chornsby) +* Add a CockroachDsn type to validate CockroachDB connection strings. The type + supports the following schemes: `cockroachdb`, `cockroachdb+psycopg2` and `cockroachdb+asyncpg`, [#3839](https://github.com/pydantic/pydantic/issues/3839) by [@blubber](https://github.com/blubber) +* Fix MyPy plugin to not override pre-existing `__init__` method in models, [#3824](https://github.com/pydantic/pydantic/issues/3824) by [@patrick91](https://github.com/patrick91) +* Fix mypy version checking, [#3783](https://github.com/pydantic/pydantic/issues/3783) by [@KotlinIsland](https://github.com/KotlinIsland) +* support overwriting dunder attributes of `BaseModel` instances, [#3777](https://github.com/pydantic/pydantic/issues/3777) by [@PrettyWood](https://github.com/PrettyWood) +* Added `ConstrainedDate` and `condate`, [#3740](https://github.com/pydantic/pydantic/issues/3740) by [@hottwaj](https://github.com/hottwaj) +* Support `kw_only` in dataclasses, [#3670](https://github.com/pydantic/pydantic/issues/3670) by [@detachhead](https://github.com/detachhead) +* Add comparison method for `Color` class, [#3646](https://github.com/pydantic/pydantic/issues/3646) by [@aminalaee](https://github.com/aminalaee) +* Drop support for python3.6, associated cleanup, [#3605](https://github.com/pydantic/pydantic/issues/3605) by [@samuelcolvin](https://github.com/samuelcolvin) +* created new function `to_lower_camel()` for "non pascal case" camel case, [#3463](https://github.com/pydantic/pydantic/issues/3463) by [@schlerp](https://github.com/schlerp) +* Add checks to `default` and `default_factory` arguments in Mypy plugin, [#3430](https://github.com/pydantic/pydantic/issues/3430) by [@klaa97](https://github.com/klaa97) +* fix mangling of `inspect.signature` for `BaseModel`, [#3413](https://github.com/pydantic/pydantic/issues/3413) by [@fix-inspect-signature](https://github.com/fix-inspect-signature) +* Adds the `SecretField` abstract class so that all the current and future secret fields like `SecretStr` and `SecretBytes` will derive from it, [#3409](https://github.com/pydantic/pydantic/issues/3409) by [@expobrain](https://github.com/expobrain) +* Support multi hosts validation in `PostgresDsn`, [#3337](https://github.com/pydantic/pydantic/issues/3337) by [@rglsk](https://github.com/rglsk) +* Fix parsing of very small numeric timedelta values, [#3315](https://github.com/pydantic/pydantic/issues/3315) by [@samuelcolvin](https://github.com/samuelcolvin) +* Update `SecretsSettingsSource` to respect `config.case_sensitive`, [#3273](https://github.com/pydantic/pydantic/issues/3273) by [@JeanArhancet](https://github.com/JeanArhancet) +* Add MongoDB network data source name (DSN) schema, [#3229](https://github.com/pydantic/pydantic/issues/3229) by [@snosratiershad](https://github.com/snosratiershad) +* Add support for multiple dotenv files, [#3222](https://github.com/pydantic/pydantic/issues/3222) by [@rekyungmin](https://github.com/rekyungmin) +* Raise an explicit `ConfigError` when multiple 
fields are incorrectly set for a single validator, [#3215](https://github.com/pydantic/pydantic/issues/3215) by [@SunsetOrange](https://github.com/SunsetOrange) +* Allow ellipsis on `Field`s inside `Annotated` for required `TypedDict` fields, [#3133](https://github.com/pydantic/pydantic/issues/3133) by [@ezegomez](https://github.com/ezegomez) +* Catch overflow errors in `int_validator`, [#3112](https://github.com/pydantic/pydantic/issues/3112) by [@ojii](https://github.com/ojii) +* Adds a `__rich_repr__` method to `Representation` class which enables pretty printing with [Rich](https://github.com/willmcgugan/rich), [#3099](https://github.com/pydantic/pydantic/issues/3099) by [@willmcgugan](https://github.com/willmcgugan) +* Add percent encoding in `AnyUrl` and descendent types, [#3061](https://github.com/pydantic/pydantic/issues/3061) by [@FaresAhmedb](https://github.com/FaresAhmedb) +* `validate_arguments` decorator now supports `alias`, [#3019](https://github.com/pydantic/pydantic/issues/3019) by [@MAD-py](https://github.com/MAD-py) +* Avoid `__dict__` and `__weakref__` attributes in `AnyUrl` and IP address fields, [#2890](https://github.com/pydantic/pydantic/issues/2890) by [@nuno-andre](https://github.com/nuno-andre) +* Add ability to use `Final` in a field type annotation, [#2766](https://github.com/pydantic/pydantic/issues/2766) by [@uriyyo](https://github.com/uriyyo) +* Update requirement to `typing_extensions>=4.1.0` to guarantee `dataclass_transform` is available, [#4424](https://github.com/pydantic/pydantic/issues/4424) by [@commonism](https://github.com/commonism) +* Add Explosion and AWS to main sponsors, [#4413](https://github.com/pydantic/pydantic/issues/4413) by [@samuelcolvin](https://github.com/samuelcolvin) +* Update documentation for `copy_on_model_validation` to reflect recent changes, [#4369](https://github.com/pydantic/pydantic/issues/4369) by [@samuelcolvin](https://github.com/samuelcolvin) +* Runtime warning if `__slots__` is passed to `create_model`; `__slots__` is then ignored, [#4432](https://github.com/pydantic/pydantic/issues/4432) by [@samuelcolvin](https://github.com/samuelcolvin) +* Add type hints to `BaseSettings.Config` to avoid mypy errors, also correct mypy version compatibility notice in docs, [#4450](https://github.com/pydantic/pydantic/issues/4450) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v1.10.0b1 (2022-08-24) + +Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v1.10.0b1) for details. + +## v1.10.0a2 (2022-08-24) + +Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v1.10.0a2) for details. + +## v1.10.0a1 (2022-08-22) + +Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v1.10.0a1) for details. + +## v1.9.2 (2022-08-11) + +**Revert Breaking Change**: _v1.9.1_ introduced a breaking change where model fields were +deep copied by default; this release reverts the default behaviour to match _v1.9.0_ and before, +while also allowing deep-copy behaviour via `copy_on_model_validation = 'deep'`. See [#4092](https://github.com/pydantic/pydantic/issues/4092) for more information.
+ +* Allow for shallow copies of model fields, `Config.copy_on_model_validation` is now a str which must be + `'none'`, `'deep'`, or `'shallow'` corresponding to not copying, deep copy & shallow copy; default `'shallow'`, + [#4093](https://github.com/pydantic/pydantic/issues/4093) by [@timkpaine](https://github.com/timkpaine) + +## v1.9.1 (2022-05-19) + +Thank you to pydantic's sponsors: +@tiangolo, [@stellargraph](https://github.com/stellargraph), [@JonasKs](https://github.com/JonasKs), [@grillazz](https://github.com/grillazz), [@Mazyod](https://github.com/Mazyod), [@kevinalh](https://github.com/kevinalh), [@chdsbd](https://github.com/chdsbd), [@povilasb](https://github.com/povilasb), [@jina-ai](https://github.com/jina-ai), +@mainframeindustries, [@robusta-dev](https://github.com/robusta-dev), [@SendCloud](https://github.com/SendCloud), [@rszamszur](https://github.com/rszamszur), [@jodal](https://github.com/jodal), [@hardbyte](https://github.com/hardbyte), [@corleyma](https://github.com/corleyma), [@daddycocoaman](https://github.com/daddycocoaman), +@Rehket, [@jokull](https://github.com/jokull), [@reillysiemens](https://github.com/reillysiemens), [@westonsteimel](https://github.com/westonsteimel), [@primer-io](https://github.com/primer-io), [@koxudaxi](https://github.com/koxudaxi), [@browniebroke](https://github.com/browniebroke), [@stradivari96](https://github.com/stradivari96), +@adriangb, [@kamalgill](https://github.com/kamalgill), [@jqueguiner](https://github.com/jqueguiner), [@dev-zero](https://github.com/dev-zero), [@datarootsio](https://github.com/datarootsio), [@RedCarpetUp](https://github.com/RedCarpetUp) +for their kind support. + +* Limit the size of `generics._generic_types_cache` and `generics._assigned_parameters` + to avoid unlimited increase in memory usage, [#4083](https://github.com/pydantic/pydantic/issues/4083) by [@samuelcolvin](https://github.com/samuelcolvin) +* Add Jupyverse and FPS as Jupyter projects using pydantic, [#4082](https://github.com/pydantic/pydantic/issues/4082) by [@davidbrochart](https://github.com/davidbrochart) +* Speed up `__instancecheck__` on pydantic models when the type is not a model, may also avoid memory "leaks", [#4081](https://github.com/pydantic/pydantic/issues/4081) by [@samuelcolvin](https://github.com/samuelcolvin) +* Fix in-place modification of `FieldInfo` that caused problems with PEP 593 type aliases, [#4067](https://github.com/pydantic/pydantic/issues/4067) by [@adriangb](https://github.com/adriangb) +* Add support for autocomplete in VS Code via `__dataclass_transform__` when using `pydantic.dataclasses.dataclass`, [#4006](https://github.com/pydantic/pydantic/issues/4006) by [@giuliano-oliveira](https://github.com/giuliano-oliveira) +* Remove benchmarks from codebase and docs, [#3973](https://github.com/pydantic/pydantic/issues/3973) by [@samuelcolvin](https://github.com/samuelcolvin) +* Type checking with pyright in CI, improve docs on vscode/pylance/pyright, [#3972](https://github.com/pydantic/pydantic/issues/3972) by [@samuelcolvin](https://github.com/samuelcolvin) +* Fix nested Python dataclass schema regression, [#3819](https://github.com/pydantic/pydantic/issues/3819) by [@himbeles](https://github.com/himbeles) +* Update documentation about lazy evaluation of sources for Settings, [#3806](https://github.com/pydantic/pydantic/issues/3806) by [@garyd203](https://github.com/garyd203) +* Prevent subclasses of bytes being converted to bytes,
[#3706](https://github.com/pydantic/pydantic/issues/3706) by [@samuelcolvin](https://github.com/samuelcolvin) +* Fixed "error checking inheritance of" when using PEP585 and PEP604 type hints, [#3681](https://github.com/pydantic/pydantic/issues/3681) by [@aleksul](https://github.com/aleksul) +* Allow self referencing `ClassVar`s in models, [#3679](https://github.com/pydantic/pydantic/issues/3679) by [@samuelcolvin](https://github.com/samuelcolvin) +* **Breaking Change, see [#4106](https://github.com/pydantic/pydantic/issues/4106)**: Fix issue with self-referencing dataclass, [#3675](https://github.com/pydantic/pydantic/issues/3675) by [@uriyyo](https://github.com/uriyyo) +* Include non-standard port numbers in rendered URLs, [#3652](https://github.com/pydantic/pydantic/issues/3652) by [@dolfinus](https://github.com/dolfinus) +* `Config.copy_on_model_validation` does a deep copy and not a shallow one, [#3641](https://github.com/pydantic/pydantic/issues/3641) by [@PrettyWood](https://github.com/PrettyWood) +* fix: clarify that discriminated unions do not support singletons, [#3636](https://github.com/pydantic/pydantic/issues/3636) by [@tommilligan](https://github.com/tommilligan) +* Add `read_text(encoding='utf-8')` for `setup.py`, [#3625](https://github.com/pydantic/pydantic/issues/3625) by [@hswong3i](https://github.com/hswong3i) +* Fix JSON Schema generation for Discriminated Unions within lists, [#3608](https://github.com/pydantic/pydantic/issues/3608) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v1.9.0 (2021-12-31) + +Thank you to pydantic's sponsors: +@sthagen, [@timdrijvers](https://github.com/timdrijvers), [@toinbis](https://github.com/toinbis), [@koxudaxi](https://github.com/koxudaxi), [@ginomempin](https://github.com/ginomempin), [@primer-io](https://github.com/primer-io), [@and-semakin](https://github.com/and-semakin), [@westonsteimel](https://github.com/westonsteimel), [@reillysiemens](https://github.com/reillysiemens), +@es3n1n, [@jokull](https://github.com/jokull), [@JonasKs](https://github.com/JonasKs), [@Rehket](https://github.com/Rehket), [@corleyma](https://github.com/corleyma), [@daddycocoaman](https://github.com/daddycocoaman), [@hardbyte](https://github.com/hardbyte), [@datarootsio](https://github.com/datarootsio), [@jodal](https://github.com/jodal), [@aminalaee](https://github.com/aminalaee), [@rafsaf](https://github.com/rafsaf), +@jqueguiner, [@chdsbd](https://github.com/chdsbd), [@kevinalh](https://github.com/kevinalh), [@Mazyod](https://github.com/Mazyod), [@grillazz](https://github.com/grillazz), [@JonasKs](https://github.com/JonasKs), [@simw](https://github.com/simw), [@leynier](https://github.com/leynier), [@xfenix](https://github.com/xfenix) +for their kind support. 
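As a taste of the v1.9.0 highlights listed below, a minimal discriminated-union sketch; the `Cat`/`Dog`/`Owner` models are illustrative, assuming pydantic >= 1.9 on Python >= 3.8:

```python
from typing import Literal, Union

from pydantic import BaseModel, Field

class Cat(BaseModel):
    pet_type: Literal['cat']
    meows: int

class Dog(BaseModel):
    pet_type: Literal['dog']
    barks: float

class Owner(BaseModel):
    # the discriminator field selects the right model and sharpens errors
    pet: Union[Cat, Dog] = Field(..., discriminator='pet_type')

print(Owner(pet={'pet_type': 'dog', 'barks': 3.14}).pet)
# pet_type='dog' barks=3.14
```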
+ +### Highlights + +* add Python 3.10 support, [#2885](https://github.com/pydantic/pydantic/issues/2885) by [@PrettyWood](https://github.com/PrettyWood) +* [Discriminated unions](https://pydantic-docs.helpmanual.io/usage/types/#discriminated-unions-aka-tagged-unions), [#619](https://github.com/pydantic/pydantic/issues/619) by [@PrettyWood](https://github.com/PrettyWood) +* [`Config.smart_union` for better union logic](https://pydantic-docs.helpmanual.io/usage/model_config/#smart-union), [#2092](https://github.com/pydantic/pydantic/issues/2092) by [@PrettyWood](https://github.com/PrettyWood) +* Binaries for macOS M1 CPUs, [#3498](https://github.com/pydantic/pydantic/issues/3498) by [@samuelcolvin](https://github.com/samuelcolvin) +* Complex types can be set via [nested environment variables](https://pydantic-docs.helpmanual.io/usage/settings/#parsing-environment-variable-values), e.g. `foo___bar`, [#3159](https://github.com/pydantic/pydantic/issues/3159) by [@Air-Mark](https://github.com/Air-Mark) +* add a dark mode to _pydantic_ documentation, [#2913](https://github.com/pydantic/pydantic/issues/2913) by [@gbdlin](https://github.com/gbdlin) +* Add support for autocomplete in VS Code via `__dataclass_transform__`, [#2721](https://github.com/pydantic/pydantic/issues/2721) by [@tiangolo](https://github.com/tiangolo) +* Add "exclude" as a field parameter so that it can be configured using model config, [#660](https://github.com/pydantic/pydantic/issues/660) by [@daviskirk](https://github.com/daviskirk) + +### v1.9.0 (2021-12-31) Changes + +* Apply `update_forward_refs` to `Config.json_encoders` to prevent name clashes in types defined via strings, [#3583](https://github.com/pydantic/pydantic/issues/3583) by [@samuelcolvin](https://github.com/samuelcolvin) +* Extend pydantic's mypy plugin to support mypy versions `0.910`, `0.920`, `0.921` & `0.930`, [#3573](https://github.com/pydantic/pydantic/issues/3573) & [#3594](https://github.com/pydantic/pydantic/issues/3594) by [@PrettyWood](https://github.com/PrettyWood), [@christianbundy](https://github.com/christianbundy), [@samuelcolvin](https://github.com/samuelcolvin) + +### v1.9.0a2 (2021-12-24) Changes + +* support generic models with discriminated union, [#3551](https://github.com/pydantic/pydantic/issues/3551) by [@PrettyWood](https://github.com/PrettyWood) +* keep old behaviour of `json()` by default, [#3542](https://github.com/pydantic/pydantic/issues/3542) by [@PrettyWood](https://github.com/PrettyWood) +* Removed typing-only `__root__` attribute from `BaseModel`, [#3540](https://github.com/pydantic/pydantic/issues/3540) by [@layday](https://github.com/layday) +* Build Python 3.10 wheels, [#3539](https://github.com/pydantic/pydantic/issues/3539) by [@mbachry](https://github.com/mbachry) +* Fix display of `extra` fields with model `__repr__`, [#3234](https://github.com/pydantic/pydantic/issues/3234) by [@cocolman](https://github.com/cocolman) +* models copied via `Config.copy_on_model_validation` always have all fields, [#3201](https://github.com/pydantic/pydantic/issues/3201) by [@PrettyWood](https://github.com/PrettyWood) +* nested ORM from nested dictionaries, [#3182](https://github.com/pydantic/pydantic/issues/3182) by [@PrettyWood](https://github.com/PrettyWood) +* fix link to discriminated union section by [@PrettyWood](https://github.com/PrettyWood) + +### v1.9.0a1 (2021-12-18) Changes + +* Add support for `Decimal`-specific validation configurations in `Field()`, in addition to using `condecimal()`, + to allow better support from editors
and tooling, [#3507](https://github.com/pydantic/pydantic/issues/3507) by [@tiangolo](https://github.com/tiangolo) +* Add `arm64` binaries suitable for macOS with an M1 CPU to PyPI, [#3498](https://github.com/pydantic/pydantic/issues/3498) by [@samuelcolvin](https://github.com/samuelcolvin) +* Fix issue where `None` was considered invalid when using a `Union` type containing `Any` or `object`, [#3444](https://github.com/pydantic/pydantic/issues/3444) by [@tharradine](https://github.com/tharradine) +* When generating field schema, pass optional `field` argument (of type + `pydantic.fields.ModelField`) to `__modify_schema__()` if present, [#3434](https://github.com/pydantic/pydantic/issues/3434) by [@jasujm](https://github.com/jasujm) +* Fix issue when pydantic fails to parse `typing.ClassVar` string type annotation, [#3401](https://github.com/pydantic/pydantic/issues/3401) by [@uriyyo](https://github.com/uriyyo) +* Mention Python >= 3.9.2 as an alternative to `typing_extensions.TypedDict`, [#3374](https://github.com/pydantic/pydantic/issues/3374) by [@BvB93](https://github.com/BvB93) +* Changed the validator method name in the [Custom Errors example](https://pydantic-docs.helpmanual.io/usage/models/#custom-errors) + to more accurately describe what the validator is doing; changed from `name_must_contain_space` to `value_must_equal_bar`, [#3327](https://github.com/pydantic/pydantic/issues/3327) by [@michaelrios28](https://github.com/michaelrios28) +* Add `AmqpDsn` class, [#3254](https://github.com/pydantic/pydantic/issues/3254) by [@kludex](https://github.com/kludex) +* Always use `Enum` value as default in generated JSON schema, [#3190](https://github.com/pydantic/pydantic/issues/3190) by [@joaommartins](https://github.com/joaommartins) +* Add support for Mypy 0.920, [#3175](https://github.com/pydantic/pydantic/issues/3175) by [@christianbundy](https://github.com/christianbundy) +* `validate_arguments` now supports `extra` customization (used to always be `Extra.forbid`), [#3161](https://github.com/pydantic/pydantic/issues/3161) by [@PrettyWood](https://github.com/PrettyWood) +* Complex types can be set by nested environment variables, [#3159](https://github.com/pydantic/pydantic/issues/3159) by [@Air-Mark](https://github.com/Air-Mark) +* Fix mypy plugin to collect fields based on `pydantic.utils.is_valid_field` so that it ignores untyped private variables, [#3146](https://github.com/pydantic/pydantic/issues/3146) by [@hi-ogawa](https://github.com/hi-ogawa) +* fix `validate_arguments` issue with `Config.validate_all`, [#3135](https://github.com/pydantic/pydantic/issues/3135) by [@PrettyWood](https://github.com/PrettyWood) +* avoid dict coercion when using dict subclasses as field type, [#3122](https://github.com/pydantic/pydantic/issues/3122) by [@PrettyWood](https://github.com/PrettyWood) +* add support for `object` type, [#3062](https://github.com/pydantic/pydantic/issues/3062) by [@PrettyWood](https://github.com/PrettyWood) +* Updates pydantic dataclasses to keep `_special` properties on parent classes, [#3043](https://github.com/pydantic/pydantic/issues/3043) by [@zulrang](https://github.com/zulrang) +* Add a `TypedDict` class for error objects, [#3038](https://github.com/pydantic/pydantic/issues/3038) by [@matthewhughes934](https://github.com/matthewhughes934) +* Fix support for using a subclass of an annotation as a default, [#3018](https://github.com/pydantic/pydantic/issues/3018) by [@JacobHayes](https://github.com/JacobHayes) +* make `create_model_from_typeddict` mypy compliant,
[#3008](https://github.com/pydantic/pydantic/issues/3008) by [@PrettyWood](https://github.com/PrettyWood) +* Make multiple inheritance work when using `PrivateAttr`, [#2989](https://github.com/pydantic/pydantic/issues/2989) by [@hmvp](https://github.com/hmvp) +* Parse environment variables as JSON, if they have a `Union` type with a complex subfield, [#2936](https://github.com/pydantic/pydantic/issues/2936) by [@cbartz](https://github.com/cbartz) +* Prevent `StrictStr` permitting `Enum` values where the enum inherits from `str`, [#2929](https://github.com/pydantic/pydantic/issues/2929) by [@samuelcolvin](https://github.com/samuelcolvin) +* Make `SecretsSettingsSource` parse values being assigned to fields of complex types when sourced from a secrets file, + just as when sourced from environment variables, [#2917](https://github.com/pydantic/pydantic/issues/2917) by [@davidmreed](https://github.com/davidmreed) +* add a dark mode to _pydantic_ documentation, [#2913](https://github.com/pydantic/pydantic/issues/2913) by [@gbdlin](https://github.com/gbdlin) +* Make `pydantic-mypy` plugin compatible with `pyproject.toml` configuration, consistent with `mypy` changes. + See the [doc](https://pydantic-docs.helpmanual.io/mypy_plugin/#configuring-the-plugin) for more information, [#2908](https://github.com/pydantic/pydantic/issues/2908) by [@jrwalk](https://github.com/jrwalk) +* add Python 3.10 support, [#2885](https://github.com/pydantic/pydantic/issues/2885) by [@PrettyWood](https://github.com/PrettyWood) +* Correctly parse generic models with `Json[T]`, [#2860](https://github.com/pydantic/pydantic/issues/2860) by [@geekingfrog](https://github.com/geekingfrog) +* Update contrib docs re: Python version to use for building docs, [#2856](https://github.com/pydantic/pydantic/issues/2856) by [@paxcodes](https://github.com/paxcodes) +* Clarify documentation about _pydantic_'s support for custom validation and strict type checking, + despite _pydantic_ being primarily a parsing library, [#2855](https://github.com/pydantic/pydantic/issues/2855) by [@paxcodes](https://github.com/paxcodes) +* Fix schema generation for `Deque` fields, [#2810](https://github.com/pydantic/pydantic/issues/2810) by [@sergejkozin](https://github.com/sergejkozin) +* fix an edge case when mixing constraints and `Literal`, [#2794](https://github.com/pydantic/pydantic/issues/2794) by [@PrettyWood](https://github.com/PrettyWood) +* Fix postponed annotation resolution for `NamedTuple` and `TypedDict` when they're used directly as the type of fields + within Pydantic models, [#2760](https://github.com/pydantic/pydantic/issues/2760) by [@jameysharp](https://github.com/jameysharp) +* Fix bug when `mypy` plugin fails on `construct` method call for `BaseSettings` derived classes, [#2753](https://github.com/pydantic/pydantic/issues/2753) by [@uriyyo](https://github.com/uriyyo) +* Add function overloading for a `pydantic.create_model` function, [#2748](https://github.com/pydantic/pydantic/issues/2748) by [@uriyyo](https://github.com/uriyyo) +* Fix mypy plugin issue with self field declaration, [#2743](https://github.com/pydantic/pydantic/issues/2743) by [@uriyyo](https://github.com/uriyyo) +* The colon at the end of the line "The fields which were supplied when user was initialised:" suggests that the code following it is related. 
+ Changed it to a period, [#2733](https://github.com/pydantic/pydantic/issues/2733) by [@krisaoe](https://github.com/krisaoe) +* Renamed variable `schema` to `schema_` to avoid shadowing of global variable name, [#2724](https://github.com/pydantic/pydantic/issues/2724) by [@shahriyarr](https://github.com/shahriyarr) +* Add support for autocomplete in VS Code via `__dataclass_transform__`, [#2721](https://github.com/pydantic/pydantic/issues/2721) by [@tiangolo](https://github.com/tiangolo) +* add missing type annotations in `BaseConfig` and handle `max_length = 0`, [#2719](https://github.com/pydantic/pydantic/issues/2719) by [@PrettyWood](https://github.com/PrettyWood) +* Change `orm_mode` checking to allow recursive ORM mode parsing with dicts, [#2718](https://github.com/pydantic/pydantic/issues/2718) by [@nuno-andre](https://github.com/nuno-andre) +* Add episode 313 of the *Talk Python To Me* podcast, where Michael Kennedy and Samuel Colvin discuss *pydantic*, to the docs, [#2712](https://github.com/pydantic/pydantic/issues/2712) by [@RatulMaharaj](https://github.com/RatulMaharaj) +* fix JSON schema generation when a field is of type `NamedTuple` and has a default value, [#2707](https://github.com/pydantic/pydantic/issues/2707) by [@PrettyWood](https://github.com/PrettyWood) +* `Enum` fields now properly support extra kwargs in schema generation, [#2697](https://github.com/pydantic/pydantic/issues/2697) by [@sammchardy](https://github.com/sammchardy) +* **Breaking Change, see [#3780](https://github.com/pydantic/pydantic/issues/3780)**: Make serialization of referenced pydantic models possible, [#2650](https://github.com/pydantic/pydantic/issues/2650) by [@PrettyWood](https://github.com/PrettyWood) +* Add `uniqueItems` option to `ConstrainedList`, [#2618](https://github.com/pydantic/pydantic/issues/2618) by [@nuno-andre](https://github.com/nuno-andre) +* Try to evaluate forward refs automatically at model creation, [#2588](https://github.com/pydantic/pydantic/issues/2588) by [@uriyyo](https://github.com/uriyyo) +* Switch docs preview and coverage display to use [smokeshow](https://smokeshow.helpmanual.io/), [#2580](https://github.com/pydantic/pydantic/issues/2580) by [@samuelcolvin](https://github.com/samuelcolvin) +* Add `__version__` attribute to pydantic module, [#2572](https://github.com/pydantic/pydantic/issues/2572) by [@paxcodes](https://github.com/paxcodes) +* Add `postgresql+asyncpg`, `postgresql+pg8000`, `postgresql+psycopg2`, `postgresql+psycopg2cffi`, `postgresql+py-postgresql` + and `postgresql+pygresql` schemes for `PostgresDsn`, [#2567](https://github.com/pydantic/pydantic/issues/2567) by [@postgres-asyncpg](https://github.com/postgres-asyncpg) +* Enable the Hypothesis plugin to generate a constrained decimal when the `decimal_places` argument is specified, [#2524](https://github.com/pydantic/pydantic/issues/2524) by [@cwe5590](https://github.com/cwe5590) +* Allow `collections.abc.Callable` to be used as type in Python 3.9, [#2519](https://github.com/pydantic/pydantic/issues/2519) by [@daviskirk](https://github.com/daviskirk) +* Documentation update how to custom compile pydantic when using pip install, small change in `setup.py` + to allow for custom CFLAGS when compiling, [#2517](https://github.com/pydantic/pydantic/issues/2517) by [@peterroelants](https://github.com/peterroelants) +* remove side effect of `default_factory` to run it only once even if `Config.validate_all` is set, [#2515](https://github.com/pydantic/pydantic/issues/2515) by 
[@PrettyWood](https://github.com/PrettyWood) +* Add lookahead to ip regexes for `AnyUrl` hosts. This allows urls with DNS labels + looking like IPs to validate as they are perfectly valid host names, [#2512](https://github.com/pydantic/pydantic/issues/2512) by [@sbv-csis](https://github.com/sbv-csis) +* Set `minItems` and `maxItems` in generated JSON schema for fixed-length tuples, [#2497](https://github.com/pydantic/pydantic/issues/2497) by [@PrettyWood](https://github.com/PrettyWood) +* Add `strict` argument to `conbytes`, [#2489](https://github.com/pydantic/pydantic/issues/2489) by [@koxudaxi](https://github.com/koxudaxi) +* Support user defined generic field types in generic models, [#2465](https://github.com/pydantic/pydantic/issues/2465) by [@daviskirk](https://github.com/daviskirk) +* Add an example and a short explanation of subclassing `GetterDict` to docs, [#2463](https://github.com/pydantic/pydantic/issues/2463) by [@nuno-andre](https://github.com/nuno-andre) +* add `KafkaDsn` type, `HttpUrl` now has default port 80 for http and 443 for https, [#2447](https://github.com/pydantic/pydantic/issues/2447) by [@MihanixA](https://github.com/MihanixA) +* Add `PastDate` and `FutureDate` types, [#2425](https://github.com/pydantic/pydantic/issues/2425) by [@Kludex](https://github.com/Kludex) +* Support generating schema for `Generic` fields with subtypes, [#2375](https://github.com/pydantic/pydantic/issues/2375) by [@maximberg](https://github.com/maximberg) +* fix(encoder): serialize `NameEmail` to str, [#2341](https://github.com/pydantic/pydantic/issues/2341) by [@alecgerona](https://github.com/alecgerona) +* add `Config.smart_union` to prevent coercion in `Union` if possible, see + [the doc](https://pydantic-docs.helpmanual.io/usage/model_config/#smart-union) for more information, [#2092](https://github.com/pydantic/pydantic/issues/2092) by [@PrettyWood](https://github.com/PrettyWood) +* Add ability to use `typing.Counter` as a model field type, [#2060](https://github.com/pydantic/pydantic/issues/2060) by [@uriyyo](https://github.com/uriyyo) +* Add parameterised subclasses to `__bases__` when constructing new parameterised classes, so that `A <: B => A[int] <: B[int]`, [#2007](https://github.com/pydantic/pydantic/issues/2007) by [@diabolo-dan](https://github.com/diabolo-dan) +* Create `FileUrl` type that allows URLs that conform to [RFC 8089](https://tools.ietf.org/html/rfc8089#section-2). + Add `host_required` parameter, which is `True` by default (`AnyUrl` and subclasses), `False` in `RedisDsn`, `FileUrl`, [#1983](https://github.com/pydantic/pydantic/issues/1983) by [@vgerak](https://github.com/vgerak) +* add `confrozenset()`, analogous to `conset()` and `conlist()`, [#1897](https://github.com/pydantic/pydantic/issues/1897) by [@PrettyWood](https://github.com/PrettyWood) +* stop calling parent class `root_validator` if overridden, [#1895](https://github.com/pydantic/pydantic/issues/1895) by [@PrettyWood](https://github.com/PrettyWood) +* Add `repr` (defaults to `True`) parameter to `Field`, to hide it from the default representation of the `BaseModel`, [#1831](https://github.com/pydantic/pydantic/issues/1831) by [@fnep](https://github.com/fnep) +* Accept empty query/fragment URL parts, [#1807](https://github.com/pydantic/pydantic/issues/1807) by [@xavier](https://github.com/xavier) + +## v1.8.2 (2021-05-11) + +!!! warning + A security vulnerability, level "moderate" is fixed in v1.8.2. Please upgrade **ASAP**. 
+ See security advisory [CVE-2021-29510](https://github.com/pydantic/pydantic/security/advisories/GHSA-5jqp-qgf6-3pvh) + +* **Security fix:** Fix `date` and `datetime` parsing so passing either `'infinity'` or `float('inf')` + (or their negative values) does not cause an infinite loop, + see security advisory [CVE-2021-29510](https://github.com/pydantic/pydantic/security/advisories/GHSA-5jqp-qgf6-3pvh) +* fix schema generation with Enum by generating a valid name, [#2575](https://github.com/pydantic/pydantic/issues/2575) by [@PrettyWood](https://github.com/PrettyWood) +* fix JSON schema generation with a `Literal` of an enum member, [#2536](https://github.com/pydantic/pydantic/issues/2536) by [@PrettyWood](https://github.com/PrettyWood) +* Fix bug with configurations declarations that are passed as + keyword arguments during class creation, [#2532](https://github.com/pydantic/pydantic/issues/2532) by [@uriyyo](https://github.com/uriyyo) +* Allow passing `json_encoders` in class kwargs, [#2521](https://github.com/pydantic/pydantic/issues/2521) by [@layday](https://github.com/layday) +* support arbitrary types with custom `__eq__`, [#2483](https://github.com/pydantic/pydantic/issues/2483) by [@PrettyWood](https://github.com/PrettyWood) +* support `Annotated` in `validate_arguments` and in generic models with Python 3.9, [#2483](https://github.com/pydantic/pydantic/issues/2483) by [@PrettyWood](https://github.com/PrettyWood) + +## v1.8.1 (2021-03-03) + +Bug fixes for regressions and new features from `v1.8` + +* allow elements of `Config.field` to update elements of a `Field`, [#2461](https://github.com/pydantic/pydantic/issues/2461) by [@samuelcolvin](https://github.com/samuelcolvin) +* fix validation with a `BaseModel` field and a custom root type, [#2449](https://github.com/pydantic/pydantic/issues/2449) by [@PrettyWood](https://github.com/PrettyWood) +* expose `Pattern` encoder to `fastapi`, [#2444](https://github.com/pydantic/pydantic/issues/2444) by [@PrettyWood](https://github.com/PrettyWood) +* enable the Hypothesis plugin to generate a constrained float when the `multiple_of` argument is specified, [#2442](https://github.com/pydantic/pydantic/issues/2442) by [@tobi-lipede-oodle](https://github.com/tobi-lipede-oodle) +* Avoid `RecursionError` when using some types like `Enum` or `Literal` with generic models, [#2436](https://github.com/pydantic/pydantic/issues/2436) by [@PrettyWood](https://github.com/PrettyWood) +* do not overwrite declared `__hash__` in subclasses of a model, [#2422](https://github.com/pydantic/pydantic/issues/2422) by [@PrettyWood](https://github.com/PrettyWood) +* fix `mypy` complaints on `Path` and `UUID` related custom types, [#2418](https://github.com/pydantic/pydantic/issues/2418) by [@PrettyWood](https://github.com/PrettyWood) +* Support properly variable length tuples of compound types, [#2416](https://github.com/pydantic/pydantic/issues/2416) by [@PrettyWood](https://github.com/PrettyWood) + +## v1.8 (2021-02-26) + +Thank you to pydantic's sponsors: +@jorgecarleitao, [@BCarley](https://github.com/BCarley), [@chdsbd](https://github.com/chdsbd), [@tiangolo](https://github.com/tiangolo), [@matin](https://github.com/matin), [@linusg](https://github.com/linusg), [@kevinalh](https://github.com/kevinalh), [@koxudaxi](https://github.com/koxudaxi), [@timdrijvers](https://github.com/timdrijvers), [@mkeen](https://github.com/mkeen), [@meadsteve](https://github.com/meadsteve), +@ginomempin, [@primer-io](https://github.com/primer-io), 
[@and-semakin](https://github.com/and-semakin), [@tomthorogood](https://github.com/tomthorogood), [@AjitZK](https://github.com/AjitZK), [@westonsteimel](https://github.com/westonsteimel), [@Mazyod](https://github.com/Mazyod), [@christippett](https://github.com/christippett), [@CarlosDomingues](https://github.com/CarlosDomingues), +@Kludex, [@r-m-n](https://github.com/r-m-n) +for their kind support. + +### Highlights + +* [Hypothesis plugin](https://pydantic-docs.helpmanual.io/hypothesis_plugin/) for testing, [#2097](https://github.com/pydantic/pydantic/issues/2097) by [@Zac-HD](https://github.com/Zac-HD) +* support for [`NamedTuple` and `TypedDict`](https://pydantic-docs.helpmanual.io/usage/types/#annotated-types), [#2216](https://github.com/pydantic/pydantic/issues/2216) by [@PrettyWood](https://github.com/PrettyWood) +* Support [`Annotated` hints on model fields](https://pydantic-docs.helpmanual.io/usage/schema/#typingannotated-fields), [#2147](https://github.com/pydantic/pydantic/issues/2147) by [@JacobHayes](https://github.com/JacobHayes) +* [`frozen` parameter on `Config`](https://pydantic-docs.helpmanual.io/usage/model_config/) to allow models to be hashed, [#1880](https://github.com/pydantic/pydantic/issues/1880) by [@rhuille](https://github.com/rhuille) + +### Changes + +* **Breaking Change**, remove old deprecation aliases from v1, [#2415](https://github.com/pydantic/pydantic/issues/2415) by [@samuelcolvin](https://github.com/samuelcolvin): + * remove notes on migrating to v1 in docs + * remove `Schema` which was replaced by `Field` + * remove `Config.case_insensitive` which was replaced by `Config.case_sensitive` (default `False`) + * remove `Config.allow_population_by_alias` which was replaced by `Config.allow_population_by_field_name` + * remove `model.fields` which was replaced by `model.__fields__` + * remove `model.to_string()` which was replaced by `str(model)` + * remove `model.__values__` which was replaced by `model.__dict__` +* **Breaking Change:** always validate only first sublevel items with `each_item`. 
+ There were indeed some edge cases with some compound types where the validated items were the last sublevel ones, [#1933](https://github.com/pydantic/pydantic/issues/1933) by [@PrettyWood](https://github.com/PrettyWood) +* Update docs extensions to fix local syntax highlighting, [#2400](https://github.com/pydantic/pydantic/issues/2400) by [@daviskirk](https://github.com/daviskirk) +* fix: allow `utils.lenient_issubclass` to handle `typing.GenericAlias` objects like `list[str]` in Python >= 3.9, [#2399](https://github.com/pydantic/pydantic/issues/2399) by [@daviskirk](https://github.com/daviskirk) +* Improve field declaration for _pydantic_ `dataclass` by allowing the usage of _pydantic_ `Field` or `'metadata'` kwarg of `dataclasses.field`, [#2384](https://github.com/pydantic/pydantic/issues/2384) by [@PrettyWood](https://github.com/PrettyWood) +* Making `typing-extensions` a required dependency, [#2368](https://github.com/pydantic/pydantic/issues/2368) by [@samuelcolvin](https://github.com/samuelcolvin) +* Make `resolve_annotations` more lenient, allowing for missing modules, [#2363](https://github.com/pydantic/pydantic/issues/2363) by [@samuelcolvin](https://github.com/samuelcolvin) +* Allow configuring models through class kwargs, [#2356](https://github.com/pydantic/pydantic/issues/2356) by [@Bobronium](https://github.com/Bobronium) +* Prevent `Mapping` subclasses from always being coerced to `dict`, [#2325](https://github.com/pydantic/pydantic/issues/2325) by [@ofek](https://github.com/ofek) +* fix: allow `None` for type `Optional[conset / conlist]`, [#2320](https://github.com/pydantic/pydantic/issues/2320) by [@PrettyWood](https://github.com/PrettyWood) +* Support empty tuple type, [#2318](https://github.com/pydantic/pydantic/issues/2318) by [@PrettyWood](https://github.com/PrettyWood) +* fix: `python_requires` metadata to require >=3.6.1, [#2306](https://github.com/pydantic/pydantic/issues/2306) by [@hukkinj1](https://github.com/hukkinj1) +* Properly encode `Decimal` with, or without any decimal places, [#2293](https://github.com/pydantic/pydantic/issues/2293) by [@hultner](https://github.com/hultner) +* fix: update `__fields_set__` in `BaseModel.copy(update=…)`, [#2290](https://github.com/pydantic/pydantic/issues/2290) by [@PrettyWood](https://github.com/PrettyWood) +* fix: keep order of fields with `BaseModel.construct()`, [#2281](https://github.com/pydantic/pydantic/issues/2281) by [@PrettyWood](https://github.com/PrettyWood) +* Support generating schema for Generic fields, [#2262](https://github.com/pydantic/pydantic/issues/2262) by [@maximberg](https://github.com/maximberg) +* Fix `validate_decorator` so `**kwargs` doesn't exclude values when the keyword + has the same name as the `*args` or `**kwargs` names, [#2251](https://github.com/pydantic/pydantic/issues/2251) by [@cybojenix](https://github.com/cybojenix) +* Prevent overriding positional arguments with keyword arguments in + `validate_arguments`, as per behaviour with native functions, [#2249](https://github.com/pydantic/pydantic/issues/2249) by [@cybojenix](https://github.com/cybojenix) +* add documentation for `con*` type functions, [#2242](https://github.com/pydantic/pydantic/issues/2242) by [@tayoogunbiyi](https://github.com/tayoogunbiyi) +* Support custom root type (aka `__root__`) when using `parse_obj()` with nested models, [#2238](https://github.com/pydantic/pydantic/issues/2238) by [@PrettyWood](https://github.com/PrettyWood) +* Support custom root type (aka `__root__`) with `from_orm()`, 
[#2237](https://github.com/pydantic/pydantic/issues/2237) by [@PrettyWood](https://github.com/PrettyWood) +* ensure cythonized functions are left untouched when creating models, based on [#1944](https://github.com/pydantic/pydantic/issues/1944) by [@kollmats](https://github.com/kollmats), [#2228](https://github.com/pydantic/pydantic/issues/2228) by [@samuelcolvin](https://github.com/samuelcolvin) +* Resolve forward refs for stdlib dataclasses converted into _pydantic_ ones, [#2220](https://github.com/pydantic/pydantic/issues/2220) by [@PrettyWood](https://github.com/PrettyWood) +* Add support for `NamedTuple` and `TypedDict` types. + Those two types are now handled and validated when used inside `BaseModel` or _pydantic_ `dataclass`. + Two utils are also added `create_model_from_namedtuple` and `create_model_from_typeddict`, [#2216](https://github.com/pydantic/pydantic/issues/2216) by [@PrettyWood](https://github.com/PrettyWood) +* Do not ignore annotated fields when type is `Union[Type[...], ...]`, [#2213](https://github.com/pydantic/pydantic/issues/2213) by [@PrettyWood](https://github.com/PrettyWood) +* Raise a user-friendly `TypeError` when a `root_validator` does not return a `dict` (e.g. `None`), [#2209](https://github.com/pydantic/pydantic/issues/2209) by [@masalim2](https://github.com/masalim2) +* Add a `FrozenSet[str]` type annotation to the `allowed_schemes` argument on the `strict_url` field type, [#2198](https://github.com/pydantic/pydantic/issues/2198) by [@Midnighter](https://github.com/Midnighter) +* add `allow_mutation` constraint to `Field`, [#2195](https://github.com/pydantic/pydantic/issues/2195) by [@sblack-usu](https://github.com/sblack-usu) +* Allow `Field` with a `default_factory` to be used as an argument to a function + decorated with `validate_arguments`, [#2176](https://github.com/pydantic/pydantic/issues/2176) by [@thomascobb](https://github.com/thomascobb) +* Allow non-existent secrets directory by only issuing a warning, [#2175](https://github.com/pydantic/pydantic/issues/2175) by [@davidolrik](https://github.com/davidolrik) +* fix URL regex to parse fragment without query string, [#2168](https://github.com/pydantic/pydantic/issues/2168) by [@andrewmwhite](https://github.com/andrewmwhite) +* fix: ensure to always return one of the values in `Literal` field type, [#2166](https://github.com/pydantic/pydantic/issues/2166) by [@PrettyWood](https://github.com/PrettyWood) +* Support `typing.Annotated` hints on model fields. 
A `Field` may now be set in the type hint with `Annotated[..., Field(...)]`; all other annotations are ignored but still visible with `get_type_hints(..., include_extras=True)`, [#2147](https://github.com/pydantic/pydantic/issues/2147) by [@JacobHayes](https://github.com/JacobHayes) +* Added `StrictBytes` type as well as `strict=False` option to `ConstrainedBytes`, [#2136](https://github.com/pydantic/pydantic/issues/2136) by [@rlizzo](https://github.com/rlizzo) +* added `Config.anystr_lower` and `to_lower` kwarg to `constr` and `conbytes`, [#2134](https://github.com/pydantic/pydantic/issues/2134) by [@tayoogunbiyi](https://github.com/tayoogunbiyi) +* Support plain `typing.Tuple` type, [#2132](https://github.com/pydantic/pydantic/issues/2132) by [@PrettyWood](https://github.com/PrettyWood) +* Add a bound method `validate` to functions decorated with `validate_arguments` + to validate parameters without actually calling the function, [#2127](https://github.com/pydantic/pydantic/issues/2127) by [@PrettyWood](https://github.com/PrettyWood) +* Add the ability to customize settings sources (add / disable / change priority order), [#2107](https://github.com/pydantic/pydantic/issues/2107) by [@kozlek](https://github.com/kozlek) +* Fix mypy complaints about most custom _pydantic_ types, [#2098](https://github.com/pydantic/pydantic/issues/2098) by [@PrettyWood](https://github.com/PrettyWood) +* Add a [Hypothesis](https://hypothesis.readthedocs.io/) plugin for easier [property-based testing](https://increment.com/testing/in-praise-of-property-based-testing/) with Pydantic's custom types - [usage details here](https://pydantic-docs.helpmanual.io/hypothesis_plugin/), [#2097](https://github.com/pydantic/pydantic/issues/2097) by [@Zac-HD](https://github.com/Zac-HD) +* add validator for `None`, `NoneType` or `Literal[None]`, [#2095](https://github.com/pydantic/pydantic/issues/2095) by [@PrettyWood](https://github.com/PrettyWood) +* Properly handle fields of type `Callable` with a default value, [#2094](https://github.com/pydantic/pydantic/issues/2094) by [@PrettyWood](https://github.com/PrettyWood) +* Updated `create_model` return type annotation to return type which inherits from `__base__` argument, [#2071](https://github.com/pydantic/pydantic/issues/2071) by [@uriyyo](https://github.com/uriyyo) +* Add merged `json_encoders` inheritance, [#2064](https://github.com/pydantic/pydantic/issues/2064) by [@art049](https://github.com/art049) +* allow overwriting `ClassVar`s in sub-models without having to re-annotate them, [#2061](https://github.com/pydantic/pydantic/issues/2061) by [@layday](https://github.com/layday) +* add default encoder for `Pattern` type, [#2045](https://github.com/pydantic/pydantic/issues/2045) by [@PrettyWood](https://github.com/PrettyWood) +* Add `NonNegativeInt`, `NonPositiveInt`, `NonNegativeFloat`, `NonPositiveFloat`, [#1975](https://github.com/pydantic/pydantic/issues/1975) by [@mdavis-xyz](https://github.com/mdavis-xyz) +* Use % for percentage in string format of colors, [#1960](https://github.com/pydantic/pydantic/issues/1960) by [@EdwardBetts](https://github.com/EdwardBetts) +* Fixed issue causing `KeyError` to be raised when building schema from multiple `BaseModel` with the same names declared in separate classes, [#1912](https://github.com/pydantic/pydantic/issues/1912) by [@JSextonn](https://github.com/JSextonn) +* Add `rediss` (Redis over SSL) protocol to `RedisDsn` + Allow URLs without `user` part (e.g., `rediss://:pass@localhost`),
[#1911](https://github.com/pydantic/pydantic/issues/1911) by [@TrDex](https://github.com/TrDex) +* Add a new `frozen` boolean parameter to `Config` (default: `False`). + Setting `frozen=True` does everything that `allow_mutation=False` does, and also generates a `__hash__()` method for the model. This makes instances of the model potentially hashable if all the attributes are hashable, [#1880](https://github.com/pydantic/pydantic/issues/1880) by [@rhuille](https://github.com/rhuille) +* fix schema generation with multiple Enums having the same name, [#1857](https://github.com/pydantic/pydantic/issues/1857) by [@PrettyWood](https://github.com/PrettyWood) +* Added support for 13/19 digits VISA credit cards in `PaymentCardNumber` type, [#1416](https://github.com/pydantic/pydantic/issues/1416) by [@AlexanderSov](https://github.com/AlexanderSov) +* fix: prevent `RecursionError` while using recursive `GenericModel`s, [#1370](https://github.com/pydantic/pydantic/issues/1370) by [@xppt](https://github.com/xppt) +* use `enum` for `typing.Literal` in JSON schema, [#1350](https://github.com/pydantic/pydantic/issues/1350) by [@PrettyWood](https://github.com/PrettyWood) +* Fix: some recursive models did not require `update_forward_refs` and silently behaved incorrectly, [#1201](https://github.com/pydantic/pydantic/issues/1201) by [@PrettyWood](https://github.com/PrettyWood) +* Fix bug where generic models with fields where the typevar is nested in another type `a: List[T]` are considered to be concrete. This allows these models to be subclassed and composed as expected, [#947](https://github.com/pydantic/pydantic/issues/947) by [@daviskirk](https://github.com/daviskirk) +* Add `Config.copy_on_model_validation` flag. When set to `False`, _pydantic_ will keep models used as fields + untouched on validation instead of reconstructing (copying) them, [#265](https://github.com/pydantic/pydantic/issues/265) by [@PrettyWood](https://github.com/PrettyWood) + +## v1.7.4 (2021-05-11) + +* **Security fix:** Fix `date` and `datetime` parsing so passing either `'infinity'` or `float('inf')` + (or their negative values) does not cause an infinite loop, + See security advisory [CVE-2021-29510](https://github.com/pydantic/pydantic/security/advisories/GHSA-5jqp-qgf6-3pvh) + +## v1.7.3 (2020-11-30) + +Thank you to pydantic's sponsors: +@timdrijvers, [@BCarley](https://github.com/BCarley), [@chdsbd](https://github.com/chdsbd), [@tiangolo](https://github.com/tiangolo), [@matin](https://github.com/matin), [@linusg](https://github.com/linusg), [@kevinalh](https://github.com/kevinalh), [@jorgecarleitao](https://github.com/jorgecarleitao), [@koxudaxi](https://github.com/koxudaxi), [@primer-api](https://github.com/primer-api), +@mkeen, [@meadsteve](https://github.com/meadsteve) for their kind support. 
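One of the v1.7.3 fixes below (#2116) concerns root validators when `validate_assignment` is on; a hedged sketch of that interaction, with a hypothetical `Window` model:

```python
from pydantic import BaseModel, root_validator

class Window(BaseModel):
    width: int
    height: int
    area: int = 0

    class Config:
        validate_assignment = True

    @root_validator
    def compute_area(cls, values):
        values['area'] = values['width'] * values['height']
        return values

w = Window(width=2, height=3)
w.width = 10  # with the fix, values changed by the root validator propagate
print(w.area)  # 30
```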
+ +* fix: set right default value for required (optional) fields, [#2142](https://github.com/pydantic/pydantic/issues/2142) by [@PrettyWood](https://github.com/PrettyWood) +* fix: support `underscore_attrs_are_private` with generic models, [#2138](https://github.com/pydantic/pydantic/issues/2138) by [@PrettyWood](https://github.com/PrettyWood) +* fix: update all modified field values in `root_validator` when `validate_assignment` is on, [#2116](https://github.com/pydantic/pydantic/issues/2116) by [@PrettyWood](https://github.com/PrettyWood) +* Allow pickling of `pydantic.dataclasses.dataclass` dynamically created from a built-in `dataclasses.dataclass`, [#2111](https://github.com/pydantic/pydantic/issues/2111) by [@aimestereo](https://github.com/aimestereo) +* Fix a regression where Enum fields would not propagate keyword arguments to the schema, [#2109](https://github.com/pydantic/pydantic/issues/2109) by [@bm424](https://github.com/bm424) +* Ignore `__doc__` as private attribute when `Config.underscore_attrs_are_private` is set, [#2090](https://github.com/pydantic/pydantic/issues/2090) by [@PrettyWood](https://github.com/PrettyWood) + +## v1.7.2 (2020-11-01) + +* fix slow `GenericModel` concrete model creation, allow `GenericModel` concrete name reusing in module, [#2078](https://github.com/pydantic/pydantic/issues/2078) by [@Bobronium](https://github.com/Bobronium) +* keep the order of the fields when `validate_assignment` is set, [#2073](https://github.com/pydantic/pydantic/issues/2073) by [@PrettyWood](https://github.com/PrettyWood) +* forward all the params of the stdlib `dataclass` when converted into _pydantic_ `dataclass`, [#2065](https://github.com/pydantic/pydantic/issues/2065) by [@PrettyWood](https://github.com/PrettyWood) + +## v1.7.1 (2020-10-28) + +Thank you to pydantic's sponsors: +@timdrijvers, [@BCarley](https://github.com/BCarley), [@chdsbd](https://github.com/chdsbd), [@tiangolo](https://github.com/tiangolo), [@matin](https://github.com/matin), [@linusg](https://github.com/linusg), [@kevinalh](https://github.com/kevinalh), [@jorgecarleitao](https://github.com/jorgecarleitao), [@koxudaxi](https://github.com/koxudaxi), [@primer-api](https://github.com/primer-api), [@mkeen](https://github.com/mkeen) +for their kind support. 
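Among the v1.7.1 fixes below is stdlib-dataclass inheritance (#2042); a minimal sketch with hypothetical classes, assuming pydantic >= 1.7.1:

```python
import dataclasses

from pydantic.dataclasses import dataclass as pydantic_dataclass

@dataclasses.dataclass
class Base:
    x: int

@pydantic_dataclass
class Model(Base):
    y: int = 0

# fields inherited from the stdlib dataclass are validated and coerced too
print(Model(x='1', y='2'))  # Model(x=1, y=2)
```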
+ +* fix annotation of `validate_arguments` when passing configuration as argument, [#2055](https://github.com/pydantic/pydantic/issues/2055) by [@layday](https://github.com/layday) +* Fix mypy assignment error when using `PrivateAttr`, [#2048](https://github.com/pydantic/pydantic/issues/2048) by [@aphedges](https://github.com/aphedges) +* fix `underscore_attrs_are_private` causing `TypeError` when overriding `__init__`, [#2047](https://github.com/pydantic/pydantic/issues/2047) by [@samuelcolvin](https://github.com/samuelcolvin) +* Fixed regression introduced in v1.7 involving exception handling in field validators when `validate_assignment=True`, [#2044](https://github.com/pydantic/pydantic/issues/2044) by [@johnsabath](https://github.com/johnsabath) +* fix: _pydantic_ `dataclass` can inherit from stdlib `dataclass` + and `Config.arbitrary_types_allowed` is supported, [#2042](https://github.com/pydantic/pydantic/issues/2042) by [@PrettyWood](https://github.com/PrettyWood) + +## v1.7 (2020-10-26) + +Thank you to pydantic's sponsors: +@timdrijvers, [@BCarley](https://github.com/BCarley), [@chdsbd](https://github.com/chdsbd), [@tiangolo](https://github.com/tiangolo), [@matin](https://github.com/matin), [@linusg](https://github.com/linusg), [@kevinalh](https://github.com/kevinalh), [@jorgecarleitao](https://github.com/jorgecarleitao), [@koxudaxi](https://github.com/koxudaxi), [@primer-api](https://github.com/primer-api) +for their kind support. + +### Highlights + +* Python 3.9 support, thanks [@PrettyWood](https://github.com/PrettyWood) +* [Private model attributes](https://pydantic-docs.helpmanual.io/usage/models/#private-model-attributes), thanks [@Bobronium](https://github.com/Bobronium) +* ["secrets files" support in `BaseSettings`](https://pydantic-docs.helpmanual.io/usage/settings/#secret-support), thanks [@mdgilene](https://github.com/mdgilene) +* [convert stdlib dataclasses to pydantic dataclasses and use stdlib dataclasses in models](https://pydantic-docs.helpmanual.io/usage/dataclasses/#stdlib-dataclasses-and-pydantic-dataclasses), thanks [@PrettyWood](https://github.com/PrettyWood) + +### Changes + +* **Breaking Change:** remove `__field_defaults__`, add `default_factory` support with `BaseModel.construct`. 
+ Use `.get_default()` method on fields in `__fields__` attribute instead, [#1732](https://github.com/pydantic/pydantic/issues/1732) by [@PrettyWood](https://github.com/PrettyWood) +* Rearrange CI to run linting as a separate job, split install recipes for different tasks, [#2020](https://github.com/pydantic/pydantic/issues/2020) by [@samuelcolvin](https://github.com/samuelcolvin) +* Allows subclasses of generic models to make some, or all, of the superclass's type parameters concrete, while + also defining new type parameters in the subclass, [#2005](https://github.com/pydantic/pydantic/issues/2005) by [@choogeboom](https://github.com/choogeboom) +* Call validator with the correct `values` parameter type in `BaseModel.__setattr__`, + when `validate_assignment = True` in model config, [#1999](https://github.com/pydantic/pydantic/issues/1999) by [@me-ransh](https://github.com/me-ransh) +* Force `fields.Undefined` to be a singleton object, fixing inherited generic model schemas, [#1981](https://github.com/pydantic/pydantic/issues/1981) by [@daviskirk](https://github.com/daviskirk) +* Include tests in source distributions, [#1976](https://github.com/pydantic/pydantic/issues/1976) by [@sbraz](https://github.com/sbraz) +* Add ability to use `min_length/max_length` constraints with secret types, [#1974](https://github.com/pydantic/pydantic/issues/1974) by [@uriyyo](https://github.com/uriyyo) +* Also check `root_validators` when `validate_assignment` is on, [#1971](https://github.com/pydantic/pydantic/issues/1971) by [@PrettyWood](https://github.com/PrettyWood) +* Fix const validators not running when custom validators are present, [#1957](https://github.com/pydantic/pydantic/issues/1957) by [@hmvp](https://github.com/hmvp) +* add `deque` to field types, [#1935](https://github.com/pydantic/pydantic/issues/1935) by [@wozniakty](https://github.com/wozniakty) +* add basic support for Python 3.9, [#1832](https://github.com/pydantic/pydantic/issues/1832) by [@PrettyWood](https://github.com/PrettyWood) +* Fix typo in the anchor of exporting_models.md#modelcopy and incorrect description, [#1821](https://github.com/pydantic/pydantic/issues/1821) by [@KimMachineGun](https://github.com/KimMachineGun) +* Added ability for `BaseSettings` to read "secret files", [#1820](https://github.com/pydantic/pydantic/issues/1820) by [@mdgilene](https://github.com/mdgilene) +* add `parse_raw_as` utility function, [#1812](https://github.com/pydantic/pydantic/issues/1812) by [@PrettyWood](https://github.com/PrettyWood) +* Support home directory relative paths for `dotenv` files (e.g. 
`~/.env`), [#1803](https://github.com/pydantic/pydantic/issues/1803) by [@PrettyWood](https://github.com/PrettyWood) +* Clarify documentation for `parse_file` to show that the argument + should be a file *path* not a file-like object, [#1794](https://github.com/pydantic/pydantic/issues/1794) by [@mdavis-xyz](https://github.com/mdavis-xyz) +* Fix false positive from mypy plugin when a class nested within a `BaseModel` is named `Model`, [#1770](https://github.com/pydantic/pydantic/issues/1770) by [@selimb](https://github.com/selimb) +* add basic support for `Pattern` type in schema generation, [#1767](https://github.com/pydantic/pydantic/issues/1767) by [@PrettyWood](https://github.com/PrettyWood) +* Support custom title, description and default in schema of enums, [#1748](https://github.com/pydantic/pydantic/issues/1748) by [@PrettyWood](https://github.com/PrettyWood) +* Properly represent `Literal` Enums when `use_enum_values` is True, [#1747](https://github.com/pydantic/pydantic/issues/1747) by [@noelevans](https://github.com/noelevans) +* Allows timezone information to be added to strings to be formatted as time objects. Permitted formats are `Z` for UTC + or an offset for absolute positive or negative time shifts; the timezone data can also be omitted, [#1744](https://github.com/pydantic/pydantic/issues/1744) by [@noelevans](https://github.com/noelevans) +* Add stub `__init__` with Python 3.6 signature for `ForwardRef`, [#1738](https://github.com/pydantic/pydantic/issues/1738) by [@sirtelemak](https://github.com/sirtelemak) +* Fix behaviour with forward refs and optional fields in nested models, [#1736](https://github.com/pydantic/pydantic/issues/1736) by [@PrettyWood](https://github.com/PrettyWood) +* add `Enum` and `IntEnum` as valid types for fields, [#1735](https://github.com/pydantic/pydantic/issues/1735) by [@PrettyWood](https://github.com/PrettyWood) +* Change default value of `__module__` argument of `create_model` from `None` to `'pydantic.main'`. + Set reference of created concrete model to its module to allow pickling (not applied to models created in + functions), [#1686](https://github.com/pydantic/pydantic/issues/1686) by [@Bobronium](https://github.com/Bobronium) +* Add private attributes support, [#1679](https://github.com/pydantic/pydantic/issues/1679) by [@Bobronium](https://github.com/Bobronium) +* add `config` to `@validate_arguments`, [#1663](https://github.com/pydantic/pydantic/issues/1663) by [@samuelcolvin](https://github.com/samuelcolvin) +* Allow descendant Settings models to override env variable names for the fields defined in parent Settings models with + `env` in their `Config`. Previously only the `env_prefix` configuration option was applicable, [#1561](https://github.com/pydantic/pydantic/issues/1561) by [@ojomio](https://github.com/ojomio) +* Support `ref_template` when creating schema `$ref`s, [#1479](https://github.com/pydantic/pydantic/issues/1479) by [@kilo59](https://github.com/kilo59) +* Add a `__call__` stub to `PyObject` so that mypy will know that it is callable, [#1352](https://github.com/pydantic/pydantic/issues/1352) by [@brianmaissy](https://github.com/brianmaissy) +* `pydantic.dataclasses.dataclass` decorator now supports built-in `dataclasses.dataclass`. + It is hence possible to convert an existing `dataclass` easily to add *pydantic* validation.
+ Moreover nested dataclasses are also supported, [#744](https://github.com/pydantic/pydantic/issues/744) by [@PrettyWood](https://github.com/PrettyWood) + +## v1.6.2 (2021-05-11) + +* **Security fix:** Fix `date` and `datetime` parsing so passing either `'infinity'` or `float('inf')` + (or their negative values) does not cause an infinite loop; + see security advisory [CVE-2021-29510](https://github.com/pydantic/pydantic/security/advisories/GHSA-5jqp-qgf6-3pvh) + +## v1.6.1 (2020-07-15) + +* fix validation and parsing of nested models with `default_factory`, [#1710](https://github.com/pydantic/pydantic/issues/1710) by [@PrettyWood](https://github.com/PrettyWood) + +## v1.6 (2020-07-11) + +Thank you to pydantic's sponsors: [@matin](https://github.com/matin), [@tiangolo](https://github.com/tiangolo), [@chdsbd](https://github.com/chdsbd), [@jorgecarleitao](https://github.com/jorgecarleitao), and 1 anonymous sponsor for their kind support. + +* Modify validators for `conlist` and `conset` to not have `always=True`, [#1682](https://github.com/pydantic/pydantic/issues/1682) by [@samuelcolvin](https://github.com/samuelcolvin) +* add port check to `AnyUrl` (can't exceed 65535); ports are 16-bit unsigned integers: `0 <= port <= 2**16-1`, src: [rfc793 header format](https://tools.ietf.org/html/rfc793#section-3.1), [#1654](https://github.com/pydantic/pydantic/issues/1654) by [@flapili](https://github.com/flapili) +* Document default `regex` anchoring semantics, [#1648](https://github.com/pydantic/pydantic/issues/1648) by [@yurikhan](https://github.com/yurikhan) +* Use `chain.from_iterable` in class_validators.py. This is a faster and more idiomatic way of using `itertools.chain`. + Instead of computing all the items in the iterable and storing them in memory, they are computed one-by-one and never + stored as a huge list.
This can save on both runtime and memory space, [#1642](https://github.com/pydantic/pydantic/issues/1642) by [@cool-RR](https://github.com/cool-RR) +* Add `conset()`, analogous to `conlist()`, [#1623](https://github.com/pydantic/pydantic/issues/1623) by [@patrickkwang](https://github.com/patrickkwang) +* make *pydantic* errors (un)pickable, [#1616](https://github.com/pydantic/pydantic/issues/1616) by [@PrettyWood](https://github.com/PrettyWood) +* Allow custom encoding for `dotenv` files, [#1615](https://github.com/pydantic/pydantic/issues/1615) by [@PrettyWood](https://github.com/PrettyWood) +* Ensure `SchemaExtraCallable` is always defined to get type hints on BaseConfig, [#1614](https://github.com/pydantic/pydantic/issues/1614) by [@PrettyWood](https://github.com/PrettyWood) +* Update datetime parser to support negative timestamps, [#1600](https://github.com/pydantic/pydantic/issues/1600) by [@mlbiche](https://github.com/mlbiche) +* Update mypy, remove `AnyType` alias for `Type[Any]`, [#1598](https://github.com/pydantic/pydantic/issues/1598) by [@samuelcolvin](https://github.com/samuelcolvin) +* Adjust handling of root validators so that errors are aggregated from _all_ failing root validators, instead of reporting on only the first root validator to fail, [#1586](https://github.com/pydantic/pydantic/issues/1586) by [@beezee](https://github.com/beezee) +* Make `__modify_schema__` on Enums apply to the enum schema rather than fields that use the enum, [#1581](https://github.com/pydantic/pydantic/issues/1581) by [@therefromhere](https://github.com/therefromhere) +* Fix behavior of `__all__` key when used in conjunction with index keys in advanced include/exclude of fields that are sequences, [#1579](https://github.com/pydantic/pydantic/issues/1579) by [@xspirus](https://github.com/xspirus) +* Subclass validators do not run when referencing a `List` field defined in a parent class when `each_item=True`. Added an example to the docs illustrating this, [#1566](https://github.com/pydantic/pydantic/issues/1566) by [@samueldeklund](https://github.com/samueldeklund) +* change `schema.field_class_to_schema` to support `frozenset` in schema, [#1557](https://github.com/pydantic/pydantic/issues/1557) by [@wangpeibao](https://github.com/wangpeibao) +* Call `__modify_schema__` only for the field schema, [#1552](https://github.com/pydantic/pydantic/issues/1552) by [@PrettyWood](https://github.com/PrettyWood) +* Move the assignment of `field.validate_always` in `fields.py` so the `always` parameter of validators work on inheritance, [#1545](https://github.com/pydantic/pydantic/issues/1545) by [@dcHHH](https://github.com/dcHHH) +* Added support for UUID instantiation through 16 byte strings such as `b'\x12\x34\x56\x78' * 4`. 
This was done to support `BINARY(16)` columns in sqlalchemy, [#1541](https://github.com/pydantic/pydantic/issues/1541) by [@shawnwall](https://github.com/shawnwall) +* Add a test assertion that `default_factory` can return a singleton, [#1523](https://github.com/pydantic/pydantic/issues/1523) by [@therefromhere](https://github.com/therefromhere) +* Add `NameEmail.__eq__` so duplicate `NameEmail` instances are evaluated as equal, [#1514](https://github.com/pydantic/pydantic/issues/1514) by [@stephen-bunn](https://github.com/stephen-bunn) +* Add datamodel-code-generator link in pydantic document site, [#1500](https://github.com/pydantic/pydantic/issues/1500) by [@koxudaxi](https://github.com/koxudaxi) +* Added a "Discussion of Pydantic" section to the documentation, with a link to "Pydantic Introduction" video by Alexander Hultnér, [#1499](https://github.com/pydantic/pydantic/issues/1499) by [@hultner](https://github.com/hultner) +* Avoid some side effects of `default_factory` by calling it only once + if possible and by not setting a default value in the schema, [#1491](https://github.com/pydantic/pydantic/issues/1491) by [@PrettyWood](https://github.com/PrettyWood) +* Added docs about dumping dataclasses to JSON, [#1487](https://github.com/pydantic/pydantic/issues/1487) by [@mikegrima](https://github.com/mikegrima) +* Make `BaseModel.__signature__` class-only, so getting `__signature__` from model instance will raise `AttributeError`, [#1466](https://github.com/pydantic/pydantic/issues/1466) by [@Bobronium](https://github.com/Bobronium) +* include `'format': 'password'` in the schema for secret types, [#1424](https://github.com/pydantic/pydantic/issues/1424) by [@atheuz](https://github.com/atheuz) +* Modify schema constraints on `ConstrainedFloat` so that `exclusiveMinimum` and + minimum are not included in the schema if they are equal to `-math.inf` and + `exclusiveMaximum` and `maximum` are not included if they are equal to `math.inf`, [#1417](https://github.com/pydantic/pydantic/issues/1417) by [@vdwees](https://github.com/vdwees) +* Squash internal `__root__` dicts in `.dict()` (and, by extension, in `.json()`), [#1414](https://github.com/pydantic/pydantic/issues/1414) by [@patrickkwang](https://github.com/patrickkwang) +* Move `const` validator to post-validators so it validates the parsed value, [#1410](https://github.com/pydantic/pydantic/issues/1410) by [@selimb](https://github.com/selimb) +* Fix model validation to handle nested literals, e.g. `Literal['foo', Literal['bar']]`, [#1364](https://github.com/pydantic/pydantic/issues/1364) by [@DBCerigo](https://github.com/DBCerigo) +* Remove `user_required = True` from `RedisDsn`, neither user nor password are required, [#1275](https://github.com/pydantic/pydantic/issues/1275) by [@samuelcolvin](https://github.com/samuelcolvin) +* Remove extra `allOf` from schema for fields with `Union` and custom `Field`, [#1209](https://github.com/pydantic/pydantic/issues/1209) by [@mostaphaRoudsari](https://github.com/mostaphaRoudsari) +* Updates OpenAPI schema generation to output all enums as separate models. 
+ Instead of inlining the enum values in the model schema, models now use a `$ref` + property to point to the enum definition, [#1173](https://github.com/pydantic/pydantic/issues/1173) by [@calvinwyoung](https://github.com/calvinwyoung) + +## v1.5.1 (2020-04-23) + +* Signature generation with `extra: allow` never uses a field name, [#1418](https://github.com/pydantic/pydantic/issues/1418) by [@PrettyWood](https://github.com/PrettyWood) +* Avoid mutating `Field` default value, [#1412](https://github.com/pydantic/pydantic/issues/1412) by [@PrettyWood](https://github.com/PrettyWood) + +## v1.5 (2020-04-18) + +* Make includes/excludes arguments for `.dict()`, `._iter()`, ..., immutable, [#1404](https://github.com/pydantic/pydantic/issues/1404) by [@AlexECX](https://github.com/AlexECX) +* Always use a field's real name with includes/excludes in `model._iter()`, regardless of `by_alias`, [#1397](https://github.com/pydantic/pydantic/issues/1397) by [@AlexECX](https://github.com/AlexECX) +* Update constr regex example to include start and end lines, [#1396](https://github.com/pydantic/pydantic/issues/1396) by [@lmcnearney](https://github.com/lmcnearney) +* Confirm that shallow `model.copy()` does make a shallow copy of attributes, [#1383](https://github.com/pydantic/pydantic/issues/1383) by [@samuelcolvin](https://github.com/samuelcolvin) +* Renaming `model_name` argument of `main.create_model()` to `__model_name` to allow using `model_name` as a field name, [#1367](https://github.com/pydantic/pydantic/issues/1367) by [@kittipatv](https://github.com/kittipatv) +* Replace raising of exception to silent passing for non-Var attributes in mypy plugin, [#1345](https://github.com/pydantic/pydantic/issues/1345) by [@b0g3r](https://github.com/b0g3r) +* Remove `typing_extensions` dependency for Python 3.8, [#1342](https://github.com/pydantic/pydantic/issues/1342) by [@PrettyWood](https://github.com/PrettyWood) +* Make `SecretStr` and `SecretBytes` initialization idempotent, [#1330](https://github.com/pydantic/pydantic/issues/1330) by [@atheuz](https://github.com/atheuz) +* document making secret types dumpable using the json method, [#1328](https://github.com/pydantic/pydantic/issues/1328) by [@atheuz](https://github.com/atheuz) +* Move all testing and build to github actions, add windows and macos binaries, + thank you [@StephenBrown2](https://github.com/StephenBrown2) for much help, [#1326](https://github.com/pydantic/pydantic/issues/1326) by [@samuelcolvin](https://github.com/samuelcolvin) +* fix card number length check in `PaymentCardNumber`, `PaymentCardBrand` now inherits from `str`, [#1317](https://github.com/pydantic/pydantic/issues/1317) by [@samuelcolvin](https://github.com/samuelcolvin) +* Have `BaseModel` inherit from `Representation` to make mypy happy when overriding `__str__`, [#1310](https://github.com/pydantic/pydantic/issues/1310) by [@FuegoFro](https://github.com/FuegoFro) +* Allow `None` as input to all optional list fields, [#1307](https://github.com/pydantic/pydantic/issues/1307) by [@PrettyWood](https://github.com/PrettyWood) +* Add `datetime` field to `default_factory` example, [#1301](https://github.com/pydantic/pydantic/issues/1301) by [@StephenBrown2](https://github.com/StephenBrown2) +* Allow subclasses of known types to be encoded with superclass encoder, [#1291](https://github.com/pydantic/pydantic/issues/1291) by [@StephenBrown2](https://github.com/StephenBrown2) +* Exclude exported fields from all elements of a list/tuple of submodels/dicts with `'__all__'`, [#1286](https://github.com/pydantic/pydantic/issues/1286) by [@masalim2](https://github.com/masalim2)
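The `'__all__'` key used in the entry above applies an exclude (or include) rule to every element of a sequence field. A minimal sketch, with invented model names:

```python
from typing import List

from pydantic import BaseModel


class User(BaseModel):
    name: str
    password: str


class Team(BaseModel):
    users: List[User]


team = Team(users=[
    {"name": "anna", "password": "s3cret"},
    {"name": "ben", "password": "hunter2"},
])
# '__all__' applies the exclusion to every element of `users`
print(team.dict(exclude={"users": {"__all__": {"password"}}}))
# {'users': [{'name': 'anna'}, {'name': 'ben'}]}
```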
* Add pydantic.color.Color objects as available input for Color fields, [#1258](https://github.com/pydantic/pydantic/issues/1258) by [@leosussan](https://github.com/leosussan) +* In examples, type nullable fields as `Optional`, so that these are valid mypy annotations, [#1248](https://github.com/pydantic/pydantic/issues/1248) by [@kokes](https://github.com/kokes) +* Make `pattern_validator()` accept pre-compiled `Pattern` objects. Fix `str_validator()` return type to `str`, [#1237](https://github.com/pydantic/pydantic/issues/1237) by [@adamgreg](https://github.com/adamgreg) +* Document how to manage Generics and inheritance, [#1229](https://github.com/pydantic/pydantic/issues/1229) by [@esadruhn](https://github.com/esadruhn) +* `update_forward_refs()` method of BaseModel now copies `__dict__` of class module instead of modifying it, [#1228](https://github.com/pydantic/pydantic/issues/1228) by [@paul-ilyin](https://github.com/paul-ilyin) +* Support instance methods and class methods with `@validate_arguments`, [#1222](https://github.com/pydantic/pydantic/issues/1222) by [@samuelcolvin](https://github.com/samuelcolvin) +* Add `default_factory` argument to `Field` to create a dynamic default value by passing a zero-argument callable, [#1210](https://github.com/pydantic/pydantic/issues/1210) by [@PrettyWood](https://github.com/PrettyWood) +* add support for `NewType` of `List`, `Optional`, etc, [#1207](https://github.com/pydantic/pydantic/issues/1207) by [@Kazy](https://github.com/Kazy) +* fix mypy signature for `root_validator`, [#1192](https://github.com/pydantic/pydantic/issues/1192) by [@samuelcolvin](https://github.com/samuelcolvin) +* Fixed parsing of nested 'custom root type' models, [#1190](https://github.com/pydantic/pydantic/issues/1190) by [@Shados](https://github.com/Shados) +* Add `validate_arguments` function decorator which checks the arguments to a function match type annotations, [#1179](https://github.com/pydantic/pydantic/issues/1179) by [@samuelcolvin](https://github.com/samuelcolvin) +* Add `__signature__` to models, [#1034](https://github.com/pydantic/pydantic/issues/1034) by [@Bobronium](https://github.com/Bobronium) +* Refactor `._iter()` method, 10x speed boost for `dict(model)`, [#1017](https://github.com/pydantic/pydantic/issues/1017) by [@Bobronium](https://github.com/Bobronium) + +## v1.4 (2020-01-24) + +* **Breaking Change:** alias precedence logic changed so aliases on a field always take priority over + an alias from `alias_generator` to avoid buggy/unexpected behaviour, + see [here](https://pydantic-docs.helpmanual.io/usage/model_config/#alias-precedence) for details, [#1178](https://github.com/pydantic/pydantic/issues/1178) by [@samuelcolvin](https://github.com/samuelcolvin) +* Add support for unicode and punycode in TLDs, [#1182](https://github.com/pydantic/pydantic/issues/1182) by [@jamescurtin](https://github.com/jamescurtin) +* Fix `cls` argument in validators during assignment, [#1172](https://github.com/pydantic/pydantic/issues/1172) by [@samuelcolvin](https://github.com/samuelcolvin) +* completing Luhn algorithm for `PaymentCardNumber`, [#1166](https://github.com/pydantic/pydantic/issues/1166) by [@cuencandres](https://github.com/cuencandres) +* add support for generics that implement `__get_validators__` like a custom data type, [#1159](https://github.com/pydantic/pydantic/issues/1159) by [@tiangolo](https://github.com/tiangolo)
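The `__get_validators__` protocol referenced in that entry is how custom data types plug into validation; the change extends it to generic classes. A minimal sketch of the protocol itself (the `LowerStr` type is invented):

```python
from pydantic import BaseModel


class LowerStr(str):
    @classmethod
    def __get_validators__(cls):
        # yield one or more validator callables, run in order
        yield cls.validate

    @classmethod
    def validate(cls, v):
        return cls(str(v).lower())


class Model(BaseModel):
    code: LowerStr


print(Model(code="ABC").code)  # 'abc'
```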
* add support for infinite generators with `Iterable`, [#1152](https://github.com/pydantic/pydantic/issues/1152) by [@tiangolo](https://github.com/tiangolo) +* fix `url_regex` to accept schemes with `+`, `-` and `.` after the first character, [#1142](https://github.com/pydantic/pydantic/issues/1142) by [@samuelcolvin](https://github.com/samuelcolvin) +* move `version_info()` to `version.py`, suggest its use in issues, [#1138](https://github.com/pydantic/pydantic/issues/1138) by [@samuelcolvin](https://github.com/samuelcolvin) +* Improve pydantic import time by roughly 50% by deferring some module loading and regex compilation, [#1127](https://github.com/pydantic/pydantic/issues/1127) by [@samuelcolvin](https://github.com/samuelcolvin) +* Fix `EmailStr` and `NameEmail` to accept instances of themselves in cython, [#1126](https://github.com/pydantic/pydantic/issues/1126) by [@koxudaxi](https://github.com/koxudaxi) +* Pass model class to the `Config.schema_extra` callable, [#1125](https://github.com/pydantic/pydantic/issues/1125) by [@therefromhere](https://github.com/therefromhere) +* Fix regex for username and password in URLs, [#1115](https://github.com/pydantic/pydantic/issues/1115) by [@samuelcolvin](https://github.com/samuelcolvin) +* Add support for nested generic models, [#1104](https://github.com/pydantic/pydantic/issues/1104) by [@dmontagu](https://github.com/dmontagu) +* add `__all__` to `__init__.py` to prevent "implicit reexport" errors from mypy, [#1072](https://github.com/pydantic/pydantic/issues/1072) by [@samuelcolvin](https://github.com/samuelcolvin) +* Add support for using "dotenv" files with `BaseSettings`, [#1011](https://github.com/pydantic/pydantic/issues/1011) by [@acnebs](https://github.com/acnebs) + +## v1.3 (2019-12-21) + +* Change `schema` and `schema_model` to handle dataclasses by using their `__pydantic_model__` feature, [#792](https://github.com/pydantic/pydantic/issues/792) by [@aviramha](https://github.com/aviramha) +* Added option for `root_validator` to be skipped if values validation fails using keyword `skip_on_failure=True`, [#1049](https://github.com/pydantic/pydantic/issues/1049) by [@aviramha](https://github.com/aviramha) +* Allow `Config.schema_extra` to be a callable so that the generated schema can be post-processed, [#1054](https://github.com/pydantic/pydantic/issues/1054) by [@selimb](https://github.com/selimb) +* Update mypy to version 0.750, [#1057](https://github.com/pydantic/pydantic/issues/1057) by [@dmontagu](https://github.com/dmontagu) +* Trick Cython into allowing str subclassing, [#1061](https://github.com/pydantic/pydantic/issues/1061) by [@skewty](https://github.com/skewty) +* Prevent type attributes being added to schema unless the attribute `__schema_attributes__` is `True`, [#1064](https://github.com/pydantic/pydantic/issues/1064) by [@samuelcolvin](https://github.com/samuelcolvin) +* Change `BaseModel.parse_file` to use `Config.json_loads`, [#1067](https://github.com/pydantic/pydantic/issues/1067) by [@kierandarcy](https://github.com/kierandarcy) +* Fix for optional `Json` fields, [#1073](https://github.com/pydantic/pydantic/issues/1073) by [@volker48](https://github.com/volker48) +* Change the default number of threads used when compiling with cython to one, + allow override via the `CYTHON_NTHREADS` environment variable, [#1074](https://github.com/pydantic/pydantic/issues/1074) by [@samuelcolvin](https://github.com/samuelcolvin) +* Run FastAPI tests during Pydantic's CI tests, [#1075](https://github.com/pydantic/pydantic/issues/1075) by
[@tiangolo](https://github.com/tiangolo) +* More mypy strictness constraints, and associated tweaks to type annotations, [#1077](https://github.com/pydantic/pydantic/issues/1077) by [@samuelcolvin](https://github.com/samuelcolvin) +* Add `__eq__` to SecretStr and SecretBytes to allow "value equals", [#1079](https://github.com/pydantic/pydantic/issues/1079) by [@sbv-trueenergy](https://github.com/sbv-trueenergy) +* Fix schema generation for nested None case, [#1088](https://github.com/pydantic/pydantic/issues/1088) by [@lutostag](https://github.com/lutostag) +* Consistent checks for sequence-like objects, [#1090](https://github.com/pydantic/pydantic/issues/1090) by [@samuelcolvin](https://github.com/samuelcolvin) +* Fix `Config` inheritance on `BaseSettings` when used with `env_prefix`, [#1091](https://github.com/pydantic/pydantic/issues/1091) by [@samuelcolvin](https://github.com/samuelcolvin) +* Fix for `__modify_schema__` when it conflicted with `field_class_to_schema*`, [#1102](https://github.com/pydantic/pydantic/issues/1102) by [@samuelcolvin](https://github.com/samuelcolvin) +* docs: Fix explanation of case sensitive environment variable names when populating `BaseSettings` subclass attributes, [#1105](https://github.com/pydantic/pydantic/issues/1105) by [@tribals](https://github.com/tribals) +* Rename django-rest-framework benchmark in documentation, [#1119](https://github.com/pydantic/pydantic/issues/1119) by [@frankie567](https://github.com/frankie567) + +## v1.2 (2019-11-28) + +* **Possible Breaking Change:** Add support for required `Optional` with `name: Optional[AnyType] = Field(...)` + and refactor `ModelField` creation to preserve `required` parameter value, [#1031](https://github.com/pydantic/pydantic/issues/1031) by [@tiangolo](https://github.com/tiangolo); + see [here](https://pydantic-docs.helpmanual.io/usage/models/#required-optional-fields) for details +* Add benchmarks for `cattrs`, [#513](https://github.com/pydantic/pydantic/issues/513) by [@sebastianmika](https://github.com/sebastianmika) +* Add `exclude_none` option to `dict()` and friends, [#587](https://github.com/pydantic/pydantic/issues/587) by [@niknetniko](https://github.com/niknetniko) +* Add benchmarks for `valideer`, [#670](https://github.com/pydantic/pydantic/issues/670) by [@gsakkis](https://github.com/gsakkis) +* Add `parse_obj_as` and `parse_file_as` functions for ad-hoc parsing of data into arbitrary pydantic-compatible types, [#934](https://github.com/pydantic/pydantic/issues/934) by [@dmontagu](https://github.com/dmontagu) +* Add `allow_reuse` argument to validators, thus allowing validator reuse, [#940](https://github.com/pydantic/pydantic/issues/940) by [@dmontagu](https://github.com/dmontagu) +* Add support for mapping types for custom root models, [#958](https://github.com/pydantic/pydantic/issues/958) by [@dmontagu](https://github.com/dmontagu) +* Mypy plugin support for dataclasses, [#966](https://github.com/pydantic/pydantic/issues/966) by [@koxudaxi](https://github.com/koxudaxi) +* Add support for dataclasses default factory, [#968](https://github.com/pydantic/pydantic/issues/968) by [@ahirner](https://github.com/ahirner) +* Add a `ByteSize` type for converting byte strings (e.g. `1GB`) to plain bytes, [#977](https://github.com/pydantic/pydantic/issues/977) by [@dgasmith](https://github.com/dgasmith)
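A short sketch of the `ByteSize` type added here (the `Disk` model is invented; `human_readable()` comes from the ByteSize conversion follow-up, #995, noted just below):

```python
from pydantic import BaseModel, ByteSize


class Disk(BaseModel):
    size: ByteSize


disk = Disk(size="1.5GiB")
print(int(disk.size))              # 1610612736, i.e. plain bytes
print(disk.size.human_readable())  # e.g. '1.5GiB'
```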
* Fix mypy complaint about `@root_validator(pre=True)`, [#984](https://github.com/pydantic/pydantic/issues/984) by [@samuelcolvin](https://github.com/samuelcolvin) +* Add manylinux binaries for Python 3.8 to pypi, also support manylinux2010, [#994](https://github.com/pydantic/pydantic/issues/994) by [@samuelcolvin](https://github.com/samuelcolvin) +* Add `ByteSize` conversion to other units, [#995](https://github.com/pydantic/pydantic/issues/995) by [@dgasmith](https://github.com/dgasmith) +* Fix `__str__` and `__repr__` inheritance for models, [#1022](https://github.com/pydantic/pydantic/issues/1022) by [@samuelcolvin](https://github.com/samuelcolvin) +* add testimonials section to docs, [#1025](https://github.com/pydantic/pydantic/issues/1025) by [@sullivancolin](https://github.com/sullivancolin) +* Add support for `typing.Literal` for Python 3.8, [#1026](https://github.com/pydantic/pydantic/issues/1026) by [@dmontagu](https://github.com/dmontagu) + +## v1.1.1 (2019-11-20) + +* Fix bug where use of complex fields on sub-models could cause fields to be incorrectly configured, [#1015](https://github.com/pydantic/pydantic/issues/1015) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v1.1 (2019-11-07) + +* Add a mypy plugin for type checking `BaseModel.__init__` and more, [#722](https://github.com/pydantic/pydantic/issues/722) by [@dmontagu](https://github.com/dmontagu) +* Change return type typehint for `GenericModel.__class_getitem__` to prevent PyCharm warnings, [#936](https://github.com/pydantic/pydantic/issues/936) by [@dmontagu](https://github.com/dmontagu) +* Fix usage of `Any` to allow `None`, also support `TypeVar` thus allowing use of un-parameterised collection types + e.g. `Dict` and `List`, [#962](https://github.com/pydantic/pydantic/issues/962) by [@samuelcolvin](https://github.com/samuelcolvin) +* Set `FieldInfo` on subfields to fix schema generation for complex nested types, [#965](https://github.com/pydantic/pydantic/issues/965) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v1.0 (2019-10-23) + +* **Breaking Change:** deprecate the `Model.fields` property, use `Model.__fields__` instead, [#883](https://github.com/pydantic/pydantic/issues/883) by [@samuelcolvin](https://github.com/samuelcolvin) +* **Breaking Change:** Change the precedence of aliases so child model aliases override parent aliases, + including using `alias_generator`, [#904](https://github.com/pydantic/pydantic/issues/904) by [@samuelcolvin](https://github.com/samuelcolvin) +* **Breaking change:** Rename `skip_defaults` to `exclude_unset`, and add ability to exclude actual defaults, [#915](https://github.com/pydantic/pydantic/issues/915) by [@dmontagu](https://github.com/dmontagu) +* Add `**kwargs` to `pydantic.main.ModelMetaclass.__new__` so `__init_subclass__` can take custom parameters on extended + `BaseModel` classes, [#867](https://github.com/pydantic/pydantic/issues/867) by [@retnikt](https://github.com/retnikt) +* Fix field of a type that has a default value, [#880](https://github.com/pydantic/pydantic/issues/880) by [@koxudaxi](https://github.com/koxudaxi) +* Use `FutureWarning` instead of `DeprecationWarning` when `alias` instead of `env` is used for settings models, [#881](https://github.com/pydantic/pydantic/issues/881) by [@samuelcolvin](https://github.com/samuelcolvin) +* Fix issue with `BaseSettings` inheritance and `alias` getting set to `None`, [#882](https://github.com/pydantic/pydantic/issues/882) by [@samuelcolvin](https://github.com/samuelcolvin) +* Modify `__repr__` and `__str__` methods to be consistent across all public classes, add `__pretty__` to support + python-devtools, [#884](https://github.com/pydantic/pydantic/issues/884) by
[@samuelcolvin](https://github.com/samuelcolvin) +* deprecation warning for `case_insensitive` on `BaseSettings` config, [#885](https://github.com/pydantic/pydantic/issues/885) by [@samuelcolvin](https://github.com/samuelcolvin) +* For `BaseSettings` merge environment variables and in-code values recursively, as long as they create a valid object + when merged together, to allow splitting init arguments, [#888](https://github.com/pydantic/pydantic/issues/888) by [@idmitrievsky](https://github.com/idmitrievsky) +* change secret types example, [#890](https://github.com/pydantic/pydantic/issues/890) by [@ashears](https://github.com/ashears) +* Change the signature of `Model.construct()` to be more user-friendly, document `construct()` usage, [#898](https://github.com/pydantic/pydantic/issues/898) by [@samuelcolvin](https://github.com/samuelcolvin) +* Add example for the `construct()` method, [#907](https://github.com/pydantic/pydantic/issues/907) by [@ashears](https://github.com/ashears) +* Improve use of `Field` constraints on complex types, raise an error if constraints are not enforceable, + also support tuples with an ellipsis `Tuple[X, ...]`, `Sequence` and `FrozenSet` in schema, [#909](https://github.com/pydantic/pydantic/issues/909) by [@samuelcolvin](https://github.com/samuelcolvin) +* update docs for bool missing valid value, [#911](https://github.com/pydantic/pydantic/issues/911) by [@trim21](https://github.com/trim21) +* Better `str`/`repr` logic for `ModelField`, [#912](https://github.com/pydantic/pydantic/issues/912) by [@samuelcolvin](https://github.com/samuelcolvin) +* Fix `ConstrainedList`, update schema generation to reflect `min_items` and `max_items` `Field()` arguments, [#917](https://github.com/pydantic/pydantic/issues/917) by [@samuelcolvin](https://github.com/samuelcolvin)
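A sketch of the `min_items`/`max_items` constraints that the #917 fix now reflects in the generated schema (model and field invented):

```python
from typing import List

from pydantic import BaseModel, Field


class Playlist(BaseModel):
    tracks: List[str] = Field(..., min_items=1, max_items=3)


# the constraints now show up as minItems/maxItems in the JSON schema
print(Playlist.schema()["properties"]["tracks"])
# roughly: {'title': 'Tracks', 'type': 'array',
#           'items': {'type': 'string'}, 'minItems': 1, 'maxItems': 3}
```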
* Allow abstract sets (e.g. dict keys) in the `include` and `exclude` arguments of `dict()`, [#921](https://github.com/pydantic/pydantic/issues/921) by [@samuelcolvin](https://github.com/samuelcolvin) +* Fix JSON serialization errors on `ValidationError.json()` by using `pydantic_encoder`, [#922](https://github.com/pydantic/pydantic/issues/922) by [@samuelcolvin](https://github.com/samuelcolvin) +* Clarify usage of `remove_untouched`, improve error message for types with no validators, [#926](https://github.com/pydantic/pydantic/issues/926) by [@retnikt](https://github.com/retnikt) + +## v1.0b2 (2019-10-07) + +* Mark `StrictBool` typecheck as `bool` to allow for default values without mypy errors, [#690](https://github.com/pydantic/pydantic/issues/690) by [@dmontagu](https://github.com/dmontagu) +* Transfer the documentation build from sphinx to mkdocs, re-write much of the documentation, [#856](https://github.com/pydantic/pydantic/issues/856) by [@samuelcolvin](https://github.com/samuelcolvin) +* Add support for custom naming schemes for `GenericModel` subclasses, [#859](https://github.com/pydantic/pydantic/issues/859) by [@dmontagu](https://github.com/dmontagu) +* Add `if TYPE_CHECKING:` to the excluded lines for test coverage, [#874](https://github.com/pydantic/pydantic/issues/874) by [@dmontagu](https://github.com/dmontagu) +* Rename `allow_population_by_alias` to `allow_population_by_field_name`, remove unnecessary warning about it, [#875](https://github.com/pydantic/pydantic/issues/875) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v1.0b1 (2019-10-01) + +* **Breaking Change:** rename `Schema` to `Field`, make it a function to placate mypy, [#577](https://github.com/pydantic/pydantic/issues/577) by [@samuelcolvin](https://github.com/samuelcolvin) +* **Breaking Change:** modify parsing behavior for `bool`, [#617](https://github.com/pydantic/pydantic/issues/617) by [@dmontagu](https://github.com/dmontagu) +* **Breaking Change:** `get_validators` is no longer recognised, use `__get_validators__`. + `Config.ignore_extra` and `Config.allow_extra` are no longer recognised, use `Config.extra`, [#720](https://github.com/pydantic/pydantic/issues/720) by [@samuelcolvin](https://github.com/samuelcolvin) +* **Breaking Change:** modify default config settings for `BaseSettings`; `case_insensitive` renamed to `case_sensitive`, + default changed to `case_sensitive = False`, `env_prefix` default changed to `''` - e.g. no prefix, [#721](https://github.com/pydantic/pydantic/issues/721) by [@dmontagu](https://github.com/dmontagu)
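A sketch of the resulting `BaseSettings` defaults (the settings class, prefix and environment variable are invented): with `case_sensitive = False`, names are matched case-insensitively, and `env_prefix` is opt-in:

```python
import os

from pydantic import BaseSettings


class Settings(BaseSettings):
    api_key: str = ""

    class Config:
        env_prefix = "MYAPP_"   # default is now '' (no prefix)
        case_sensitive = False  # the new default


os.environ["myapp_api_key"] = "s3cret"  # matched despite the casing
print(Settings().api_key)  # 's3cret'
```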
* **Breaking change:** Implement `root_validator` and rename root errors from `__obj__` to `__root__`, [#729](https://github.com/pydantic/pydantic/issues/729) by [@samuelcolvin](https://github.com/samuelcolvin) +* **Breaking Change:** alter the behaviour of `dict(model)` so that sub-models are no longer + converted to dictionaries, [#733](https://github.com/pydantic/pydantic/issues/733) by [@samuelcolvin](https://github.com/samuelcolvin) +* **Breaking change:** Added `initvars` support to `post_init_post_parse`, [#748](https://github.com/pydantic/pydantic/issues/748) by [@Raphael-C-Almeida](https://github.com/Raphael-C-Almeida) +* **Breaking Change:** Make `BaseModel.json()` only serialize the `__root__` key for models with custom root, [#752](https://github.com/pydantic/pydantic/issues/752) by [@dmontagu](https://github.com/dmontagu) +* **Breaking Change:** complete rewrite of `URL` parsing logic, [#755](https://github.com/pydantic/pydantic/issues/755) by [@samuelcolvin](https://github.com/samuelcolvin) +* **Breaking Change:** preserve superclass annotations for field-determination when not provided in subclass, [#757](https://github.com/pydantic/pydantic/issues/757) by [@dmontagu](https://github.com/dmontagu) +* **Breaking Change:** `BaseSettings` now uses the special `env` settings to define which environment variables to + read, not aliases, [#847](https://github.com/pydantic/pydantic/issues/847) by [@samuelcolvin](https://github.com/samuelcolvin) +* add support for `assert` statements inside validators, [#653](https://github.com/pydantic/pydantic/issues/653) by [@abdusco](https://github.com/abdusco) +* Update documentation to specify the use of `pydantic.dataclasses.dataclass` and subclassing `pydantic.BaseModel`, [#710](https://github.com/pydantic/pydantic/issues/710) by [@maddosaurus](https://github.com/maddosaurus) +* Allow custom JSON decoding and encoding via `json_loads` and `json_dumps` `Config` properties, [#714](https://github.com/pydantic/pydantic/issues/714) by [@samuelcolvin](https://github.com/samuelcolvin) +* make all annotated fields occur in the order declared, [#715](https://github.com/pydantic/pydantic/issues/715) by [@dmontagu](https://github.com/dmontagu) +* use pytest to test `mypy` integration, [#735](https://github.com/pydantic/pydantic/issues/735) by [@dmontagu](https://github.com/dmontagu) +* add `__repr__` method to `ErrorWrapper`, [#738](https://github.com/pydantic/pydantic/issues/738) by [@samuelcolvin](https://github.com/samuelcolvin) +* Added support for `FrozenSet` members in dataclasses, and a better error when attempting to use types from the `typing` module that are not supported by Pydantic, [#745](https://github.com/pydantic/pydantic/issues/745) by [@djpetti](https://github.com/djpetti) +* add documentation for PyCharm Plugin, [#750](https://github.com/pydantic/pydantic/issues/750) by [@koxudaxi](https://github.com/koxudaxi) +* fix broken examples in the docs, [#753](https://github.com/pydantic/pydantic/issues/753) by [@dmontagu](https://github.com/dmontagu) +* moving typing related objects into `pydantic.typing`, [#761](https://github.com/pydantic/pydantic/issues/761) by [@samuelcolvin](https://github.com/samuelcolvin) +* Minor performance improvements to `ErrorWrapper`, `ValidationError` and datetime parsing, [#763](https://github.com/pydantic/pydantic/issues/763) by [@samuelcolvin](https://github.com/samuelcolvin) +* Improvements to
`datetime`/`date`/`time`/`timedelta` types: more descriptive errors, + change errors to `value_error` not `type_error`, support bytes, [#766](https://github.com/pydantic/pydantic/issues/766) by [@samuelcolvin](https://github.com/samuelcolvin) +* fix error messages for `Literal` types with multiple allowed values, [#770](https://github.com/pydantic/pydantic/issues/770) by [@dmontagu](https://github.com/dmontagu) +* Improved auto-generated `title` field in JSON schema by converting underscore to space, [#772](https://github.com/pydantic/pydantic/issues/772) by [@skewty](https://github.com/skewty) +* support `mypy --no-implicit-reexport` for dataclasses, also respect `--no-implicit-reexport` in pydantic itself, [#783](https://github.com/pydantic/pydantic/issues/783) by [@samuelcolvin](https://github.com/samuelcolvin) +* add the `PaymentCardNumber` type, [#790](https://github.com/pydantic/pydantic/issues/790) by [@matin](https://github.com/matin) +* Fix const validations for lists, [#794](https://github.com/pydantic/pydantic/issues/794) by [@hmvp](https://github.com/hmvp) +* Set `additionalProperties` to false in schema for models with extra fields disallowed, [#796](https://github.com/pydantic/pydantic/issues/796) by [@Code0x58](https://github.com/Code0x58) +* `EmailStr` validation method now returns local part case-sensitive per RFC 5321, [#798](https://github.com/pydantic/pydantic/issues/798) by [@henriklindgren](https://github.com/henriklindgren) +* Added ability to validate strictness to `ConstrainedFloat`, `ConstrainedInt` and `ConstrainedStr` and added + `StrictFloat` and `StrictInt` classes, [#799](https://github.com/pydantic/pydantic/issues/799) by [@DerRidda](https://github.com/DerRidda) +* Improve handling of `None` and `Optional`, replace `whole` with `each_item` (inverse meaning, default `False`) + on validators, [#803](https://github.com/pydantic/pydantic/issues/803) by [@samuelcolvin](https://github.com/samuelcolvin) +* add support for `Type[T]` type hints, [#807](https://github.com/pydantic/pydantic/issues/807) by [@timonbimon](https://github.com/timonbimon) +* Performance improvements from removing `change_exceptions`, change how pydantic errors are constructed, [#819](https://github.com/pydantic/pydantic/issues/819) by [@samuelcolvin](https://github.com/samuelcolvin) +* Fix the error message arising when a `BaseModel`-type model field causes a `ValidationError` during parsing, [#820](https://github.com/pydantic/pydantic/issues/820) by [@dmontagu](https://github.com/dmontagu) +* allow `getter_dict` on `Config`, modify `GetterDict` to be more like a `Mapping` object and thus easier to work with, [#821](https://github.com/pydantic/pydantic/issues/821) by [@samuelcolvin](https://github.com/samuelcolvin) +* Only check `TypeVar` param on base `GenericModel` class, [#842](https://github.com/pydantic/pydantic/issues/842) by [@zpencerq](https://github.com/zpencerq) +* rename `Model._schema_cache` -> `Model.__schema_cache__`, `Model._json_encoder` -> `Model.__json_encoder__`, + `Model._custom_root_type` -> `Model.__custom_root_type__`, [#851](https://github.com/pydantic/pydantic/issues/851) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v0.32.2 (2019-08-17) + +(Docs are available [here](https://5d584fcca7c9b70007d1c997--pydantic-docs.netlify.com)) + +* fix `__post_init__` usage with dataclass inheritance, fix [#739](https://github.com/pydantic/pydantic/issues/739) by [@samuelcolvin](https://github.com/samuelcolvin) +* fix required fields validation on `GenericModel` classes, [#742](https://github.com/pydantic/pydantic/issues/742) by [@amitbl](https://github.com/amitbl)
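For context, a minimal `GenericModel` with a required field, which is the case the fix above addresses (names invented):

```python
from typing import Generic, TypeVar

from pydantic import ValidationError
from pydantic.generics import GenericModel

T = TypeVar("T")


class Response(GenericModel, Generic[T]):
    data: T  # required: no default


print(Response[int](data=1).data)  # 1
try:
    Response[int]()  # the required field is now enforced
except ValidationError as exc:
    print(exc.errors()[0]["type"])  # 'value_error.missing'
```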
* fix defining custom `Schema` on `GenericModel` fields, [#754](https://github.com/pydantic/pydantic/issues/754) by [@amitbl](https://github.com/amitbl) + +## v0.32.1 (2019-08-08) + +* do not validate extra fields when `validate_assignment` is on, [#724](https://github.com/pydantic/pydantic/issues/724) by [@YaraslauZhylko](https://github.com/YaraslauZhylko) + +## v0.32 (2019-08-06) + +* add model name to `ValidationError` error message, [#676](https://github.com/pydantic/pydantic/issues/676) by [@dmontagu](https://github.com/dmontagu) +* **breaking change**: remove `__getattr__` and rename `__values__` to `__dict__` on `BaseModel`, + deprecation warning on use of the `__values__` attr, attribute access speed increased up to 14 times, [#712](https://github.com/pydantic/pydantic/issues/712) by [@Bobronium](https://github.com/Bobronium) +* support `ForwardRef` (without self-referencing annotations) in Python 3.6, [#706](https://github.com/pydantic/pydantic/issues/706) by [@koxudaxi](https://github.com/koxudaxi) +* implement `schema_extra` in `Config` sub-class, [#663](https://github.com/pydantic/pydantic/issues/663) by [@tiangolo](https://github.com/tiangolo) + +## v0.31.1 (2019-07-31) + +* fix json generation for `EnumError`, [#697](https://github.com/pydantic/pydantic/issues/697) by [@dmontagu](https://github.com/dmontagu) +* update numerous dependencies + +## v0.31 (2019-07-24) + +* better support for floating point `multiple_of` values, [#652](https://github.com/pydantic/pydantic/issues/652) by [@justindujardin](https://github.com/justindujardin) +* fix schema generation for `NewType` and `Literal`, [#649](https://github.com/pydantic/pydantic/issues/649) by [@dmontagu](https://github.com/dmontagu) +* fix `alias_generator` and field config conflict, [#645](https://github.com/pydantic/pydantic/issues/645) by [@gmetzker](https://github.com/gmetzker) and [#658](https://github.com/pydantic/pydantic/issues/658) by [@Bobronium](https://github.com/Bobronium) +* more detailed message for `EnumError`, [#673](https://github.com/pydantic/pydantic/issues/673) by [@dmontagu](https://github.com/dmontagu) +* add advanced exclude support for `dict`, `json` and `copy`, [#648](https://github.com/pydantic/pydantic/issues/648) by [@Bobronium](https://github.com/Bobronium) +* fix bug in `GenericModel` for models with concrete parameterized fields, [#672](https://github.com/pydantic/pydantic/issues/672) by [@dmontagu](https://github.com/dmontagu) +* add documentation for `Literal` type, [#651](https://github.com/pydantic/pydantic/issues/651) by [@dmontagu](https://github.com/dmontagu) +* add `Config.keep_untouched` for custom descriptors support, [#679](https://github.com/pydantic/pydantic/issues/679) by [@Bobronium](https://github.com/Bobronium) +* use `inspect.cleandoc` internally to get model description, [#657](https://github.com/pydantic/pydantic/issues/657) by [@tiangolo](https://github.com/tiangolo) +* add `Color` to schema generation, by [@euri10](https://github.com/euri10) + +## v0.30.1 (2019-07-15) + +* fix so nested classes which inherit and change `__init__` are correctly processed while still allowing `self` as a + parameter, [#644](https://github.com/pydantic/pydantic/issues/644) by [@lnaden](https://github.com/lnaden) and [@dgasmith](https://github.com/dgasmith) + +##
v0.30 (2019-07-07) + +* enforce single quotes in code, [#612](https://github.com/pydantic/pydantic/issues/612) by [@samuelcolvin](https://github.com/samuelcolvin) +* fix infinite recursion with dataclass inheritance and `__post_init__`, [#606](https://github.com/pydantic/pydantic/issues/606) by [@Hanaasagi](https://github.com/Hanaasagi) +* fix default values for `GenericModel`, [#610](https://github.com/pydantic/pydantic/issues/610) by [@dmontagu](https://github.com/dmontagu) +* clarify that self-referencing models require Python 3.7+, [#616](https://github.com/pydantic/pydantic/issues/616) by [@vlcinsky](https://github.com/vlcinsky) +* fix truncate for types, [#611](https://github.com/pydantic/pydantic/issues/611) by [@dmontagu](https://github.com/dmontagu) +* add `alias_generator` support, [#622](https://github.com/pydantic/pydantic/issues/622) by [@Bobronium](https://github.com/Bobronium) +* fix unparameterized generic type schema generation, [#625](https://github.com/pydantic/pydantic/issues/625) by [@dmontagu](https://github.com/dmontagu) +* fix schema generation with multiple/circular references to the same model, [#621](https://github.com/pydantic/pydantic/issues/621) by [@tiangolo](https://github.com/tiangolo) and [@wongpat](https://github.com/wongpat) +* support custom root types, [#628](https://github.com/pydantic/pydantic/issues/628) by [@koxudaxi](https://github.com/koxudaxi) +* support `self` as a field name in `parse_obj`, [#632](https://github.com/pydantic/pydantic/issues/632) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v0.29 (2019-06-19) + +* support dataclasses.InitVar, [#592](https://github.com/pydantic/pydantic/issues/592) by [@pfrederiks](https://github.com/pfrederiks) +* Updated documentation to elucidate the usage of `Union` when defining multiple types under an attribute's + annotation and showcase how the type-order can affect marshalling of provided values, [#594](https://github.com/pydantic/pydantic/issues/594) by [@somada141](https://github.com/somada141) +* add `conlist` type, [#583](https://github.com/pydantic/pydantic/issues/583) by [@hmvp](https://github.com/hmvp) +* add support for generics, [#595](https://github.com/pydantic/pydantic/issues/595) by [@dmontagu](https://github.com/dmontagu) + +## v0.28 (2019-06-06) + +* fix support for JSON Schema generation when using models with circular references in Python 3.7, [#572](https://github.com/pydantic/pydantic/issues/572) by [@tiangolo](https://github.com/tiangolo) +* support `__post_init_post_parse__` on dataclasses, [#567](https://github.com/pydantic/pydantic/issues/567) by [@sevaho](https://github.com/sevaho) +* allow dumping dataclasses to JSON, [#575](https://github.com/pydantic/pydantic/issues/575) by [@samuelcolvin](https://github.com/samuelcolvin) and [@DanielOberg](https://github.com/DanielOberg) +* ORM mode, [#562](https://github.com/pydantic/pydantic/issues/562) by [@samuelcolvin](https://github.com/samuelcolvin) +* fix `pydantic.compiled` on ipython, [#573](https://github.com/pydantic/pydantic/issues/573) by [@dmontagu](https://github.com/dmontagu) and [@samuelcolvin](https://github.com/samuelcolvin) +* add `StrictBool` type, [#579](https://github.com/pydantic/pydantic/issues/579) by [@cazgp](https://github.com/cazgp) + +## v0.27 (2019-05-30) + +* **breaking change** `_pydantic_post_init` to execute dataclass' original `__post_init__` before + validation, [#560](https://github.com/pydantic/pydantic/issues/560) by [@HeavenVolkoff](https://github.com/HeavenVolkoff) +* fix handling of 
generic types without specified parameters, [#550](https://github.com/pydantic/pydantic/issues/550) by [@dmontagu](https://github.com/dmontagu) +* **breaking change** (maybe): this is the first release compiled with **cython**, see the docs and please + submit an issue if you run into problems + +## v0.27.0a1 (2019-05-26) + +* fix JSON Schema for `list`, `tuple`, and `set`, [#540](https://github.com/pydantic/pydantic/issues/540) by [@tiangolo](https://github.com/tiangolo) +* compiling with cython, `manylinux` binaries, some other performance improvements, [#548](https://github.com/pydantic/pydantic/issues/548) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v0.26 (2019-05-22) + +* fix to schema generation for `IPvAnyAddress`, `IPvAnyInterface`, `IPvAnyNetwork`, [#498](https://github.com/pydantic/pydantic/issues/498) by [@pilosus](https://github.com/pilosus) +* fix variable length tuples support, [#495](https://github.com/pydantic/pydantic/issues/495) by [@pilosus](https://github.com/pilosus) +* fix return type hint for `create_model`, [#526](https://github.com/pydantic/pydantic/issues/526) by [@dmontagu](https://github.com/dmontagu) +* **Breaking Change:** fix `.dict(skip_keys=True)` skipping values set via alias (this involves changing + `validate_model()` to always return `Tuple[Dict[str, Any], Set[str], Optional[ValidationError]]`), [#517](https://github.com/pydantic/pydantic/issues/517) by [@sommd](https://github.com/sommd) +* fix to schema generation for `IPv4Address`, `IPv6Address`, `IPv4Interface`, + `IPv6Interface`, `IPv4Network`, `IPv6Network`, [#532](https://github.com/pydantic/pydantic/issues/532) by [@euri10](https://github.com/euri10) +* add `Color` type, [#504](https://github.com/pydantic/pydantic/issues/504) by [@pilosus](https://github.com/pilosus) and [@samuelcolvin](https://github.com/samuelcolvin) + +## v0.25 (2019-05-05) + +* Improve documentation on self-referencing models and annotations, [#487](https://github.com/pydantic/pydantic/issues/487) by [@theenglishway](https://github.com/theenglishway) +* fix `.dict()` with extra keys, [#490](https://github.com/pydantic/pydantic/issues/490) by [@JaewonKim](https://github.com/JaewonKim) +* support `const` keyword in `Schema`, [#434](https://github.com/pydantic/pydantic/issues/434) by [@Sean1708](https://github.com/Sean1708) + +## v0.24 (2019-04-23) + +* fix handling `ForwardRef` in sub-types, like `Union`, [#464](https://github.com/pydantic/pydantic/issues/464) by [@tiangolo](https://github.com/tiangolo) +* fix secret serialization, [#465](https://github.com/pydantic/pydantic/issues/465) by [@atheuz](https://github.com/atheuz) +* Support custom validators for dataclasses, [#454](https://github.com/pydantic/pydantic/issues/454) by [@primal100](https://github.com/primal100) +* fix `parse_obj` to cope with dict-like objects, [#472](https://github.com/pydantic/pydantic/issues/472) by [@samuelcolvin](https://github.com/samuelcolvin) +* fix to schema generation in nested dataclass-based models, [#474](https://github.com/pydantic/pydantic/issues/474) by [@NoAnyLove](https://github.com/NoAnyLove) +* fix `json` for `Path`, `FilePath`, and `DirectoryPath` objects, [#473](https://github.com/pydantic/pydantic/issues/473) by [@mikegoodspeed](https://github.com/mikegoodspeed) + +## v0.23 (2019-04-04) + +* improve documentation for contributing section, [#441](https://github.com/pydantic/pydantic/issues/441) by [@pilosus](https://github.com/pilosus) +* improve README.rst to include essential information about the package,
[#446](https://github.com/pydantic/pydantic/issues/446) by [@pilosus](https://github.com/pilosus) +* `IntEnum` support, [#444](https://github.com/pydantic/pydantic/issues/444) by [@potykion](https://github.com/potykion) +* fix PyObject callable value, [#409](https://github.com/pydantic/pydantic/issues/409) by [@pilosus](https://github.com/pilosus) +* fix `black` deprecation warnings after update, [#451](https://github.com/pydantic/pydantic/issues/451) by [@pilosus](https://github.com/pilosus) +* fix `ForwardRef` collection bug, [#450](https://github.com/pydantic/pydantic/issues/450) by [@tigerwings](https://github.com/tigerwings) +* Support specialized `ClassVars`, [#455](https://github.com/pydantic/pydantic/issues/455) by [@tyrylu](https://github.com/tyrylu) +* fix JSON serialization for `ipaddress` types, [#333](https://github.com/pydantic/pydantic/issues/333) by [@pilosus](https://github.com/pilosus) +* add `SecretStr` and `SecretBytes` types, [#452](https://github.com/pydantic/pydantic/issues/452) by [@atheuz](https://github.com/atheuz) + +## v0.22 (2019-03-29) + +* add `IPv{4,6,Any}Network` and `IPv{4,6,Any}Interface` types from `ipaddress` stdlib, [#333](https://github.com/pydantic/pydantic/issues/333) by [@pilosus](https://github.com/pilosus) +* add docs for `datetime` types, [#386](https://github.com/pydantic/pydantic/issues/386) by [@pilosus](https://github.com/pilosus) +* fix to schema generation in dataclass-based models, [#408](https://github.com/pydantic/pydantic/issues/408) by [@pilosus](https://github.com/pilosus) +* fix path in nested models, [#437](https://github.com/pydantic/pydantic/issues/437) by [@kataev](https://github.com/kataev) +* add `Sequence` support, [#304](https://github.com/pydantic/pydantic/issues/304) by [@pilosus](https://github.com/pilosus) + +## v0.21.0 (2019-03-15) + +* fix typo in `NoneIsNotAllowedError` message, [#414](https://github.com/pydantic/pydantic/issues/414) by [@YaraslauZhylko](https://github.com/YaraslauZhylko) +* add `IPvAnyAddress`, `IPv4Address` and `IPv6Address` types, [#333](https://github.com/pydantic/pydantic/issues/333) by [@pilosus](https://github.com/pilosus) + +## v0.20.1 (2019-02-26) + +* fix type hints of `parse_obj` and similar methods, [#405](https://github.com/pydantic/pydantic/issues/405) by [@erosennin](https://github.com/erosennin) +* fix submodel validation, [#403](https://github.com/pydantic/pydantic/issues/403) by [@samuelcolvin](https://github.com/samuelcolvin) +* correct type hints for `ValidationError.json`, [#406](https://github.com/pydantic/pydantic/issues/406) by [@layday](https://github.com/layday) + +## v0.20.0 (2019-02-18) + +* fix tests for Python 3.8, [#396](https://github.com/pydantic/pydantic/issues/396) by [@samuelcolvin](https://github.com/samuelcolvin) +* Adds fields to the `dir` method for autocompletion in interactive sessions, [#398](https://github.com/pydantic/pydantic/issues/398) by [@dgasmith](https://github.com/dgasmith) +* support `ForwardRef` (and therefore `from __future__ import annotations`) with dataclasses, [#397](https://github.com/pydantic/pydantic/issues/397) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v0.20.0a1 (2019-02-13) + +* **breaking change** (maybe): more sophisticated argument parsing for validators, any subset of + `values`, `config` and `field` is now permitted, eg. `(cls, value, field)`, + however the variadic keyword argument ("`**kwargs`") **must** be called `kwargs`, [#388](https://github.com/pydantic/pydantic/issues/388) by [@samuelcolvin](https://github.com/samuelcolvin)
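A sketch of the flexible validator signatures this change permits (the `Booking` model is invented): any subset of `values`, `config` and `field` may follow the value argument.

```python
from pydantic import BaseModel, validator


class Booking(BaseModel):
    start: int
    end: int

    @validator("end")
    def check_order(cls, v, values):
        # `values` holds already-validated fields; `config` and `field`
        # could also be declared here, in any subset
        if "start" in values and v <= values["start"]:
            raise ValueError("end must be after start")
        return v


print(Booking(start=1, end=2))
```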
* **breaking change**: Adds `skip_defaults` argument to `BaseModel.dict()` to allow skipping of fields that + were not explicitly set, signature of `Model.construct()` changed, [#389](https://github.com/pydantic/pydantic/issues/389) by [@dgasmith](https://github.com/dgasmith) +* add `py.typed` marker file for PEP-561 support, [#391](https://github.com/pydantic/pydantic/issues/391) by [@je-l](https://github.com/je-l) +* Fix `extra` behaviour for multiple inheritance/mix-ins, [#394](https://github.com/pydantic/pydantic/issues/394) by [@YaraslauZhylko](https://github.com/YaraslauZhylko) + +## v0.19.0 (2019-02-04) + +* Support `Callable` type hint, fix [#279](https://github.com/pydantic/pydantic/issues/279) by [@proofit404](https://github.com/proofit404) +* Fix schema for fields with `validator` decorator, fix [#375](https://github.com/pydantic/pydantic/issues/375) by [@tiangolo](https://github.com/tiangolo) +* Add `multiple_of` constraint to `ConstrainedDecimal`, `ConstrainedFloat`, `ConstrainedInt` + and their related types `condecimal`, `confloat`, and `conint` [#371](https://github.com/pydantic/pydantic/issues/371), thanks [@StephenBrown2](https://github.com/StephenBrown2) +* Deprecated `ignore_extra` and `allow_extra` Config fields in favor of `extra`, [#352](https://github.com/pydantic/pydantic/issues/352) by [@liiight](https://github.com/liiight) +* Add type annotations to all functions, test fully with mypy, [#373](https://github.com/pydantic/pydantic/issues/373) by [@samuelcolvin](https://github.com/samuelcolvin) +* fix for 'missing' error with `validate_all` or `validate_always`, [#381](https://github.com/pydantic/pydantic/issues/381) by [@samuelcolvin](https://github.com/samuelcolvin) +* Change the second/millisecond watershed for date/datetime parsing to `2e10`, [#385](https://github.com/pydantic/pydantic/issues/385) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v0.18.2 (2019-01-22) + +* Fix to schema generation with `Optional` fields, fix [#361](https://github.com/pydantic/pydantic/issues/361) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v0.18.1 (2019-01-17) + +* add `ConstrainedBytes` and `conbytes` types, [#315](https://github.com/pydantic/pydantic/issues/315) [@Gr1N](https://github.com/Gr1N) +* adding `MANIFEST.in` to include license in package `.tar.gz`, [#358](https://github.com/pydantic/pydantic/issues/358) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v0.18.0 (2019-01-13) + +* **breaking change**: don't call validators on keys of dictionaries, [#254](https://github.com/pydantic/pydantic/issues/254) by [@samuelcolvin](https://github.com/samuelcolvin) +* Fix validators with `always=True` when the default is `None` or the type is optional, also prevent + `whole` validators being called for sub-fields, fix [#132](https://github.com/pydantic/pydantic/issues/132) by [@samuelcolvin](https://github.com/samuelcolvin) +* improve documentation for settings priority and allow it to be easily changed, [#343](https://github.com/pydantic/pydantic/issues/343) by [@samuelcolvin](https://github.com/samuelcolvin) +* fix `ignore_extra=False` and `allow_population_by_alias=True`, fix [#257](https://github.com/pydantic/pydantic/issues/257) by [@samuelcolvin](https://github.com/samuelcolvin) +* **breaking change**: Set `BaseConfig` attributes
`min_anystr_length` and `max_anystr_length` to + `None` by default, fix [#349](https://github.com/pydantic/pydantic/issues/349) in [#350](https://github.com/pydantic/pydantic/issues/350) by [@tiangolo](https://github.com/tiangolo) +* add support for postponed annotations, [#348](https://github.com/pydantic/pydantic/issues/348) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v0.17.0 (2018-12-27) + +* fix schema for `timedelta` as number, [#325](https://github.com/pydantic/pydantic/issues/325) by [@tiangolo](https://github.com/tiangolo) +* prevent validators being called repeatedly after inheritance, [#327](https://github.com/pydantic/pydantic/issues/327) by [@samuelcolvin](https://github.com/samuelcolvin) +* prevent duplicate validator check in ipython, fix [#312](https://github.com/pydantic/pydantic/issues/312) by [@samuelcolvin](https://github.com/samuelcolvin) +* add "Using Pydantic" section to docs, [#323](https://github.com/pydantic/pydantic/issues/323) by [@tiangolo](https://github.com/tiangolo) & [#326](https://github.com/pydantic/pydantic/issues/326) by [@samuelcolvin](https://github.com/samuelcolvin) +* fix schema generation for fields annotated as `: dict`, `: list`, + `: tuple` and `: set`, [#330](https://github.com/pydantic/pydantic/issues/330) & [#335](https://github.com/pydantic/pydantic/issues/335) by [@nkonin](https://github.com/nkonin) +* add support for constrained strings as dict keys in schema, [#332](https://github.com/pydantic/pydantic/issues/332) by [@tiangolo](https://github.com/tiangolo) +* support for passing Config class in dataclasses decorator, [#276](https://github.com/pydantic/pydantic/issues/276) by [@jarekkar](https://github.com/jarekkar) + (**breaking change**: this supersedes the `validate_assignment` argument with `config`) +* support for nested dataclasses, [#334](https://github.com/pydantic/pydantic/issues/334) by [@samuelcolvin](https://github.com/samuelcolvin) +* better errors when getting an `ImportError` with `PyObject`, [#309](https://github.com/pydantic/pydantic/issues/309) by [@samuelcolvin](https://github.com/samuelcolvin) +* rename `get_validators` to `__get_validators__`, deprecation warning on use of old name, [#338](https://github.com/pydantic/pydantic/issues/338) by [@samuelcolvin](https://github.com/samuelcolvin) +* support `ClassVar` by excluding such attributes from fields, [#184](https://github.com/pydantic/pydantic/issues/184) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v0.16.1 (2018-12-10) + +* fix `create_model` to correctly use the passed `__config__`, [#320](https://github.com/pydantic/pydantic/issues/320) by [@hugoduncan](https://github.com/hugoduncan) + +## v0.16.0 (2018-12-03) + +* **breaking change**: refactor schema generation to be compatible with JSON Schema and OpenAPI specs, [#308](https://github.com/pydantic/pydantic/issues/308) by [@tiangolo](https://github.com/tiangolo) +* add `schema` to `schema` module to generate top-level schemas from base models, [#308](https://github.com/pydantic/pydantic/issues/308) by [@tiangolo](https://github.com/tiangolo) +* add additional fields to `Schema` class to declare validation for `str` and numeric values, [#311](https://github.com/pydantic/pydantic/issues/311) by [@tiangolo](https://github.com/tiangolo) +* rename `_schema` to `schema` on fields, [#318](https://github.com/pydantic/pydantic/issues/318) by [@samuelcolvin](https://github.com/samuelcolvin) +* add `case_insensitive` option to `BaseSettings` `Config`, 
[#277](https://github.com/pydantic/pydantic/issues/277) by [@jasonkuhrt](https://github.com/jasonkuhrt) + +## v0.15.0 (2018-11-18) + +* move codebase to use black, [#287](https://github.com/pydantic/pydantic/issues/287) by [@samuelcolvin](https://github.com/samuelcolvin) +* fix alias use in settings, [#286](https://github.com/pydantic/pydantic/issues/286) by [@jasonkuhrt](https://github.com/jasonkuhrt) and [@samuelcolvin](https://github.com/samuelcolvin) +* fix datetime parsing in `parse_date`, [#298](https://github.com/pydantic/pydantic/issues/298) by [@samuelcolvin](https://github.com/samuelcolvin) +* allow dataclass inheritance, fix [#293](https://github.com/pydantic/pydantic/issues/293) by [@samuelcolvin](https://github.com/samuelcolvin) +* fix `PyObject = None`, fix [#305](https://github.com/pydantic/pydantic/issues/305) by [@samuelcolvin](https://github.com/samuelcolvin) +* allow `Pattern` type, fix [#303](https://github.com/pydantic/pydantic/issues/303) by [@samuelcolvin](https://github.com/samuelcolvin) + +## v0.14.0 (2018-10-02) + +* dataclasses decorator, [#269](https://github.com/pydantic/pydantic/issues/269) by [@Gaunt](https://github.com/Gaunt) and [@samuelcolvin](https://github.com/samuelcolvin) + +## v0.13.1 (2018-09-21) + +* fix issue where `int_validator` doesn't cast a `bool` to an `int` [#264](https://github.com/pydantic/pydantic/issues/264) by [@nphyatt](https://github.com/nphyatt) +* add deep copy support for `BaseModel.copy()` [#249](https://github.com/pydantic/pydantic/issues/249), [@gangefors](https://github.com/gangefors) + +## v0.13.0 (2018-08-25) + +* raise an exception if a field's name shadows an existing `BaseModel` attribute [#242](https://github.com/pydantic/pydantic/issues/242) +* add `UrlStr` and `urlstr` types [#236](https://github.com/pydantic/pydantic/issues/236) +* timedelta json encoding ISO8601 and total seconds, custom json encoders [#247](https://github.com/pydantic/pydantic/issues/247), by [@cfkanesan](https://github.com/cfkanesan) and [@samuelcolvin](https://github.com/samuelcolvin) +* allow `timedelta` objects as values for properties of type `timedelta` (matches `datetime` etc. behavior) [#247](https://github.com/pydantic/pydantic/issues/247)
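A sketch of the custom JSON encoders introduced with #247 (the `Task` model is invented); this is also how `timedelta` can be emitted as total seconds instead of ISO 8601:

```python
from datetime import timedelta

from pydantic import BaseModel


class Task(BaseModel):
    interval: timedelta

    class Config:
        # override the default ISO 8601 encoding for timedelta
        json_encoders = {timedelta: lambda td: td.total_seconds()}


print(Task(interval=timedelta(minutes=2)).json())  # {"interval": 120.0}
```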
## v0.12.1 (2018-07-31) + +* fix schema generation for fields defined using `typing.Any` [#237](https://github.com/pydantic/pydantic/issues/237) + +## v0.12.0 (2018-07-31) + +* add `by_alias` argument in `.dict()` and `.json()` model methods [#205](https://github.com/pydantic/pydantic/issues/205) +* add Json type support [#214](https://github.com/pydantic/pydantic/issues/214) +* support tuples [#227](https://github.com/pydantic/pydantic/issues/227) +* major improvements and changes to schema [#213](https://github.com/pydantic/pydantic/issues/213) + +## v0.11.2 (2018-07-05) + +* add `NewType` support [#115](https://github.com/pydantic/pydantic/issues/115) +* fix `list`, `set` & `tuple` validation [#225](https://github.com/pydantic/pydantic/issues/225) +* separate out `validate_model` method, allow errors to be returned along with valid values [#221](https://github.com/pydantic/pydantic/issues/221) + +## v0.11.1 (2018-07-02) + +* support Python 3.7 [#216](https://github.com/pydantic/pydantic/issues/216), thanks [@layday](https://github.com/layday) +* Allow arbitrary types in model [#209](https://github.com/pydantic/pydantic/issues/209), thanks [@oldPadavan](https://github.com/oldPadavan) + +## v0.11.0 (2018-06-28) + +* make `list`, `tuple` and `set` types stricter [#86](https://github.com/pydantic/pydantic/issues/86) +* **breaking change**: remove msgpack parsing [#201](https://github.com/pydantic/pydantic/issues/201) +* add `FilePath` and `DirectoryPath` types [#10](https://github.com/pydantic/pydantic/issues/10) +* model schema generation [#190](https://github.com/pydantic/pydantic/issues/190) +* JSON serialisation of models and schemas [#133](https://github.com/pydantic/pydantic/issues/133) + +## v0.10.0 (2018-06-11) + +* add `Config.allow_population_by_alias` [#160](https://github.com/pydantic/pydantic/issues/160), thanks [@bendemaree](https://github.com/bendemaree) +* **breaking change**: new errors format [#179](https://github.com/pydantic/pydantic/issues/179), thanks [@Gr1N](https://github.com/Gr1N) +* **breaking change**: removed `Config.min_number_size` and `Config.max_number_size` [#183](https://github.com/pydantic/pydantic/issues/183), thanks [@Gr1N](https://github.com/Gr1N) +* **breaking change**: correct behaviour of `lt` and `gt` arguments to `conint` etc. [#188](https://github.com/pydantic/pydantic/issues/188) + for the old behaviour use `le` and `ge` [#194](https://github.com/pydantic/pydantic/issues/194), thanks [@jaheba](https://github.com/jaheba)
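A sketch of the corrected semantics (field invented): `gt`/`lt` are strictly exclusive bounds, while `ge`/`le` keep the previous inclusive behaviour.

```python
from pydantic import BaseModel, ValidationError, conint


class Player(BaseModel):
    level: conint(gt=0, le=100)  # 0 is rejected, 100 is accepted


print(Player(level=100).level)  # 100
try:
    Player(level=0)
except ValidationError:
    print("0 fails the exclusive gt=0 bound")
```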
* added error context and ability to redefine error message templates using `Config.error_msg_templates` [#183](https://github.com/pydantic/pydantic/issues/183), + thanks [@Gr1N](https://github.com/Gr1N) +* fix typo in validator exception [#150](https://github.com/pydantic/pydantic/issues/150) +* copy defaults to model values, so different models don't share objects [#154](https://github.com/pydantic/pydantic/issues/154) + +## v0.9.1 (2018-05-10) + +* allow custom `get_field_config` on config classes [#159](https://github.com/pydantic/pydantic/issues/159) +* add `UUID1`, `UUID3`, `UUID4` and `UUID5` types [#167](https://github.com/pydantic/pydantic/issues/167), thanks [@Gr1N](https://github.com/Gr1N) +* modify some inconsistent docstrings and annotations [#173](https://github.com/pydantic/pydantic/issues/173), thanks [@YannLuo](https://github.com/YannLuo) +* fix type annotations for exotic types [#171](https://github.com/pydantic/pydantic/issues/171), thanks [@Gr1N](https://github.com/Gr1N) +* re-use type validators in exotic types [#171](https://github.com/pydantic/pydantic/issues/171) +* scheduled monthly requirements updates [#168](https://github.com/pydantic/pydantic/issues/168) +* add `Decimal`, `ConstrainedDecimal` and `condecimal` types [#170](https://github.com/pydantic/pydantic/issues/170), thanks [@Gr1N](https://github.com/Gr1N) + +## v0.9.0 (2018-04-28) + +* tweak email-validator import error message [#145](https://github.com/pydantic/pydantic/issues/145) +* fix parse error of `parse_date()` and `parse_datetime()` when input is 0 [#144](https://github.com/pydantic/pydantic/issues/144), thanks [@YannLuo](https://github.com/YannLuo) +* add `Config.anystr_strip_whitespace` and `strip_whitespace` kwarg to `constr`, + by default the value is `False` [#163](https://github.com/pydantic/pydantic/issues/163), thanks [@Gr1N](https://github.com/Gr1N) +* add `ConstrainedFloat`, `confloat`, `PositiveFloat` and `NegativeFloat` types [#166](https://github.com/pydantic/pydantic/issues/166), thanks [@Gr1N](https://github.com/Gr1N) + +## v0.8.0 (2018-03-25) + +* fix type annotation for `inherit_config` [#139](https://github.com/pydantic/pydantic/issues/139) +* **breaking change**: check for invalid field names in validators [#140](https://github.com/pydantic/pydantic/issues/140) +* validate attributes of parent models [#141](https://github.com/pydantic/pydantic/issues/141) +* **breaking change**: email validation now uses + [email-validator](https://github.com/JoshData/python-email-validator) [#142](https://github.com/pydantic/pydantic/issues/142) + +## v0.7.1 (2018-02-07) + +* fix bug with `create_model` modifying the base class + +## v0.7.0 (2018-02-06) + +* added compatibility with abstract base classes (ABCs) [#123](https://github.com/pydantic/pydantic/issues/123) +* add `create_model` method [#113](https://github.com/pydantic/pydantic/issues/113) [#125](https://github.com/pydantic/pydantic/issues/125) +* **breaking change**: rename `.config` to `.__config__` on a model +* **breaking change**: remove deprecated `.values()` on a model, use `.dict()` instead +* remove use of `OrderedDict` and use simple dict [#126](https://github.com/pydantic/pydantic/issues/126) +* add `Config.use_enum_values` [#127](https://github.com/pydantic/pydantic/issues/127) +* add wildcard validators of the form `@validate('*')` [#128](https://github.com/pydantic/pydantic/issues/128)
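A sketch of a wildcard validator; note this uses the `@validator('*')` spelling of the pydantic v1 API vendored here, rather than the `@validate('*')` form quoted in the entry above (model invented):

```python
from pydantic import BaseModel, validator


class Form(BaseModel):
    first_name: str
    last_name: str

    @validator("*")
    def strip_whitespace(cls, v):
        # '*' applies this validator to every field
        return v.strip() if isinstance(v, str) else v


print(Form(first_name="  Ada ", last_name=" Lovelace "))
# first_name='Ada' last_name='Lovelace'
```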
`@validate('*')` [#128](https://github.com/pydantic/pydantic/issues/128)
+
+## v0.6.4 (2018-02-01)
+
+* allow Python date and time objects [#122](https://github.com/pydantic/pydantic/issues/122)
+
+## v0.6.3 (2017-11-26)
+
+* fix direct install without `README.rst` present
+
+## v0.6.2 (2017-11-13)
+
+* errors for invalid validator use
+* safer check for complex models in `Settings`
+
+## v0.6.1 (2017-11-08)
+
+* prevent duplicate validators, [#101](https://github.com/pydantic/pydantic/issues/101)
+* add `always` kwarg to validators, [#102](https://github.com/pydantic/pydantic/issues/102)
+
+## v0.6.0 (2017-11-07)
+
+* assignment validation [#94](https://github.com/pydantic/pydantic/issues/94), thanks petroswork!
+* JSON in environment variables for complex types, [#96](https://github.com/pydantic/pydantic/issues/96)
+* add `validator` decorators for complex validation, [#97](https://github.com/pydantic/pydantic/issues/97)
+* deprecate `values(...)` and replace with `.dict(...)`, [#99](https://github.com/pydantic/pydantic/issues/99)
+
+## v0.5.0 (2017-10-23)
+
+* add `UUID` validation [#89](https://github.com/pydantic/pydantic/issues/89)
+* remove `index` and `track` from error object (json) if they're null [#90](https://github.com/pydantic/pydantic/issues/90)
+* improve the error text when a list is provided rather than a dict [#90](https://github.com/pydantic/pydantic/issues/90)
+* add benchmarks table to docs [#91](https://github.com/pydantic/pydantic/issues/91)
+
+## v0.4.0 (2017-07-08)
+
+* show length in string validation error
+* fix aliases in config during inheritance [#55](https://github.com/pydantic/pydantic/issues/55)
+* simplify error display
+* use unicode ellipsis in `truncate`
+* add `parse_obj`, `parse_raw` and `parse_file` helper functions [#58](https://github.com/pydantic/pydantic/issues/58)
+* switch annotation-only fields to come first in the fields list, not last
+
+## v0.3.0 (2017-06-21)
+
+* immutable models via `config.allow_mutation = False`, associated cleanup and performance improvement [#44](https://github.com/pydantic/pydantic/issues/44)
+* immutable helper methods `construct()` and `copy()` [#53](https://github.com/pydantic/pydantic/issues/53)
+* allow pickling of models [#53](https://github.com/pydantic/pydantic/issues/53)
+* `setattr` is removed as `__setattr__` is now intelligent [#44](https://github.com/pydantic/pydantic/issues/44)
+* `raise_exception` removed, models now always raise exceptions [#44](https://github.com/pydantic/pydantic/issues/44)
+* instance method validators removed
+* django-restful-framework benchmarks added [#47](https://github.com/pydantic/pydantic/issues/47)
+* fix inheritance bug [#49](https://github.com/pydantic/pydantic/issues/49)
+* make str type stricter so list, dict etc are not coerced to strings.
[#52](https://github.com/pydantic/pydantic/issues/52)
+* add `StrictStr` which only allows strings as input [#52](https://github.com/pydantic/pydantic/issues/52)
+
+## v0.2.1 (2017-06-07)
+
+* pypi and travis together messed up the deploy of `v0.2`; this should fix it
+
+## v0.2.0 (2017-06-07)
+
+* **breaking change**: `values()` on a model is now a method not a property,
+  takes `include` and `exclude` arguments
+* allow annotation only fields to support mypy
+* add pretty `to_string(pretty=True)` method for models
+
+## v0.1.0 (2017-06-03)
+
+* add docs
+* add history
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/RECORD
new file mode 100644
index 00000000..841e903c
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/RECORD
@@ -0,0 +1,85 @@
+pydantic-1.10.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pydantic-1.10.1.dist-info/LICENSE,sha256=hTAP7jiruii5lyyjNd4zTlWK0rLDrk0xZOxd05vA63I,1148
+pydantic-1.10.1.dist-info/METADATA,sha256=aCaGBaEILP3YihwL6soTbNMDkzTINo81FDMOLRkqLcA,138610
+pydantic-1.10.1.dist-info/RECORD,,
+pydantic-1.10.1.dist-info/WHEEL,sha256=-FqDJjo4HrkrIaThvtAYef2eEAn1_QvVe5nINCnUEbQ,101
+pydantic-1.10.1.dist-info/entry_points.txt,sha256=EquH5n3pilIXg-LLa1K4evpu5-6dnvxzi6vwvkoAMns,45
+pydantic-1.10.1.dist-info/top_level.txt,sha256=cmo_5n0F_YY5td5nPZBfdjBENkmGg_pE5ShWXYbXxTM,9
+pydantic/__init__.cp37-win_amd64.pyd,sha256=9EtGfi-DXFH5kHn0ZVxxmSn1SAPIwIBc6WAxMn1vh3k,34304
+pydantic/__init__.py,sha256=IMonwleOL8_ijc_z8hbBS2Zr1VbJ3wZKEHyZTk7jAqI,2902
+pydantic/__pycache__/__init__.cpython-37.pyc,,
+pydantic/__pycache__/_hypothesis_plugin.cpython-37.pyc,,
+pydantic/__pycache__/annotated_types.cpython-37.pyc,,
+pydantic/__pycache__/class_validators.cpython-37.pyc,,
+pydantic/__pycache__/color.cpython-37.pyc,,
+pydantic/__pycache__/config.cpython-37.pyc,,
+pydantic/__pycache__/dataclasses.cpython-37.pyc,,
+pydantic/__pycache__/datetime_parse.cpython-37.pyc,,
+pydantic/__pycache__/decorator.cpython-37.pyc,,
+pydantic/__pycache__/env_settings.cpython-37.pyc,,
+pydantic/__pycache__/error_wrappers.cpython-37.pyc,,
+pydantic/__pycache__/errors.cpython-37.pyc,,
+pydantic/__pycache__/fields.cpython-37.pyc,,
+pydantic/__pycache__/generics.cpython-37.pyc,,
+pydantic/__pycache__/json.cpython-37.pyc,,
+pydantic/__pycache__/main.cpython-37.pyc,,
+pydantic/__pycache__/mypy.cpython-37.pyc,,
+pydantic/__pycache__/networks.cpython-37.pyc,,
+pydantic/__pycache__/parse.cpython-37.pyc,,
+pydantic/__pycache__/schema.cpython-37.pyc,,
+pydantic/__pycache__/tools.cpython-37.pyc,,
+pydantic/__pycache__/types.cpython-37.pyc,,
+pydantic/__pycache__/typing.cpython-37.pyc,,
+pydantic/__pycache__/utils.cpython-37.pyc,,
+pydantic/__pycache__/validators.cpython-37.pyc,,
+pydantic/__pycache__/version.cpython-37.pyc,,
+pydantic/_hypothesis_plugin.cp37-win_amd64.pyd,sha256=AVm6P6jGU5f1VmYMDO3Ah3t4Pc_prUvZaPsN2JOcHJ4,154112
+pydantic/_hypothesis_plugin.py,sha256=FVkVK7E7KUxJ5lBfYEtBrVyzZGPIOhL6FI1y4Mgx6xQ,14982
+pydantic/annotated_types.cp37-win_amd64.pyd,sha256=Wirz_vRbxCkNeSkJyaN1dKKKYSRM1I2QtCqPn5peixg,67072
+pydantic/annotated_types.py,sha256=nX2rXe0W24YU_ZBOxfSq3v_gE_WkliUaBkNu_BP4K_I,3196
+pydantic/class_validators.cp37-win_amd64.pyd,sha256=BsFXV5YCuY7GEN7WzV_8C8EzMw1uwBY0jHqV8KSgocE,183296
+pydantic/class_validators.py,sha256=RzsbOgU6D5HlRtzWglGtci5lob8deQjZBCTbJ7W6Jvc,14173
+pydantic/color.cp37-win_amd64.pyd,sha256=I1OZsbmnyfTdg8XllBjeroHKhvuwSphbjoA_5D40gyI,224256 +pydantic/color.py,sha256=Dof59e3zmdlBJAGFEnm10RQW_jId655sL_ojIOUzQQ8,17305 +pydantic/config.cp37-win_amd64.pyd,sha256=S0Bu5TKL4GG50a84zxJ3JFs_Lmr8xobsk8wHRAGNDtY,79360 +pydantic/config.py,sha256=ftQ1PIX-x2C1i4I7nt8QANrc-uKebmVvaXBWZ5ODoSc,6682 +pydantic/dataclasses.cp37-win_amd64.pyd,sha256=_8pkOT9iK_BJxJzHZHd4_NeQSioDOlMBO2C6cKv1QQY,177152 +pydantic/dataclasses.py,sha256=7WrYzv-3cGTaUixG6sRT_8iiypcNjnyQM6Cnu9tbDZg,17175 +pydantic/datetime_parse.cp37-win_amd64.pyd,sha256=qS_u19--OLIFTvcd9KEty5DiqgG8ffkMOLdRshMRycQ,96256 +pydantic/datetime_parse.py,sha256=iOM3uNOwF9ReLjZTgtoUd5Qc2LqFBH51FUQ7-E61Wk4,7962 +pydantic/decorator.cp37-win_amd64.pyd,sha256=0Qii4T9JcQsh8rlIuhf_3i1IcwQe4PNlIoEDx59A1dU,134144 +pydantic/decorator.py,sha256=Yc9SUTzNgcOm_y481pqdtYsGs3vQQDAT77SMtp_A9rE,10527 +pydantic/env_settings.cp37-win_amd64.pyd,sha256=-sS38CRblnLuwL18FNidv8TPA6cLyKGvCa3NJmi0pUE,169472 +pydantic/env_settings.py,sha256=c0bnrFTcUEhKU3HZ7IYJzY9_97K7IQEyWkCiL2bPog8,14238 +pydantic/error_wrappers.cp37-win_amd64.pyd,sha256=TrVhyAsNQbmCXTJEhEVEkXJtuqYRoXfE_KIK2kuEuyU,117248 +pydantic/error_wrappers.py,sha256=ughxvoJSWSBHoQqTOiKIElrgCdTrKdfd4aXPbEUyufc,5304 +pydantic/errors.cp37-win_amd64.pyd,sha256=P11lPsK40fJTcV-14g2rQ0rNKB2_46yJpntmFAnzM7s,184320 +pydantic/errors.py,sha256=qpw1Ac9YcJOwPkkGl_ejt5k1L92YJLnQ7kKv33DvPHE,18339 +pydantic/fields.cp37-win_amd64.pyd,sha256=Gu8PLJGMyzHttbgwL_ljVxsyrbg0YDwF4qbQLxwBrgM,391168 +pydantic/fields.py,sha256=zNH8Ycr_5IXQCPlKylqBZQDfncN9z3hBVx4Toj2BFkA,51065 +pydantic/generics.py,sha256=k66LdBXhSPgKgE8LPVI-drKFmZvA7sCjSt3DxRGjGec,16355 +pydantic/json.cp37-win_amd64.pyd,sha256=y6JhxjxeG80ycepr3OcjvRGVPfLWr7Es3Ger8ZX3dF0,69632 +pydantic/json.py,sha256=oBUAWTZ9WHdpNPOVG9YFAo84RBJwLnJ1UZXprrKJ6pY,3458 +pydantic/main.cp37-win_amd64.pyd,sha256=_huKlo0DxuiThhdJooVBK_SPkaa1PPpBLKAHqteJiVU,385024 +pydantic/main.py,sha256=XhcB5wIK_4CAOg1uQjbtAMyMGfoOz-OWxw2CNSiCz_Q,45424 +pydantic/mypy.cp37-win_amd64.pyd,sha256=QLhAtuez3eR1yylExJUtnhO8Rxg_KpSrMwIf65vDu2Y,318464 +pydantic/mypy.py,sha256=uKv7WbD0YZ0jEbaBbsAmUh0dPUHQh2yF0XbEByaeDiI,34833 +pydantic/networks.cp37-win_amd64.pyd,sha256=lYLsG-uKvGztgGHg937AGbqe3OaJpQmgQs4iU-3YPNc,256512 +pydantic/networks.py,sha256=uCbTpO-rte3-22kZB4GG5pCJ6xBPdrcZlizkOQehvHU,22997 +pydantic/parse.cp37-win_amd64.pyd,sha256=RJ5SDKOrUMT5HgkgUiYUvrU4HN1oYdt9Gw_aAsfUNzE,50176 +pydantic/parse.py,sha256=LIo38lV-iaHWb1cWxryDHo79UQ7GUC2bHOwTPherYbM,1876 +pydantic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pydantic/schema.cp37-win_amd64.pyd,sha256=7qgvUK5zkiKUGWGmTxLL3aBGOAh1Lhn5tZ4PKlvU_uU,346624 +pydantic/schema.py,sha256=rLLcDLO-xLgY6376uqwfzk7f8MHkkcLg0imMP2IpF58,48299 +pydantic/tools.cp37-win_amd64.pyd,sha256=euNZ3DFXbly9W34X3PTs__BxYGk6BQkQaAhtYhHOx4w,64000 +pydantic/tools.py,sha256=QS5T9ySTA5AeS-OvZN0dMoODMMtDq4Rnef5y_11xQ28,2926 +pydantic/types.cp37-win_amd64.pyd,sha256=WHmeE79EXUy1GfgWyF_aXTlIKwOdiv0iqaKV_-MqXZM,354816 +pydantic/types.py,sha256=Nc40CFsSunzepVvqtS7GeuGQpQtR1lt7adsa5qEV-ZM,35935 +pydantic/typing.cp37-win_amd64.pyd,sha256=nK-a8UlWiuTJSZruMQDBd5MSEhzyx5tPMtx7s88kLZ0,200704 +pydantic/typing.py,sha256=z35TPZ61qD3REnI_n63WGJqf1XSl1AMZoozdyr6RNvs,19745 +pydantic/utils.cp37-win_amd64.pyd,sha256=Dsyof-WCL7BfKFJbh_SiPyp5tG8sBk5Rorczixq52WE,305152 +pydantic/utils.py,sha256=6uzru0XY9WqAQBtPiFQ5rBS3ciFFwB263tY0xoajSGk,27939 +pydantic/validators.cp37-win_amd64.pyd,sha256=L-CpvzoKeXi1nSoO15X00L30E_Qx-QA6u5mikxBQoUs,264704 
+pydantic/validators.py,sha256=0mBiSyEXDdkKJmyE6JVV9js_6Sf9ZVQRcd4BGlu-Ltc,21813 +pydantic/version.cp37-win_amd64.pyd,sha256=qjuAk3r9x5ea7ynIKEoQRn3M2y0uadJqsIdnRkNATCU,53760 +pydantic/version.py,sha256=Nj1AXdUuCRdtXK6j_ZTjKlz_iTW1nAc0PLiPEJ21Khg,1076 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/WHEEL b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/WHEEL new file mode 100644 index 00000000..50364dcd --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: false +Tag: cp37-cp37m-win_amd64 + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/entry_points.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/entry_points.txt new file mode 100644 index 00000000..6178b39c --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[hypothesis] +_ = pydantic._hypothesis_plugin diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/top_level.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/top_level.txt new file mode 100644 index 00000000..572b352f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic-1.10.1.dist-info/top_level.txt @@ -0,0 +1 @@ +pydantic diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/__init__.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/__init__.cp37-win_amd64.pyd new file mode 100644 index 00000000..10f0e8bc Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/__init__.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/__init__.py new file mode 100644 index 00000000..3bf1418f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/__init__.py @@ -0,0 +1,131 @@ +# flake8: noqa +from . import dataclasses +from .annotated_types import create_model_from_namedtuple, create_model_from_typeddict +from .class_validators import root_validator, validator +from .config import BaseConfig, ConfigDict, Extra +from .decorator import validate_arguments +from .env_settings import BaseSettings +from .error_wrappers import ValidationError +from .errors import * +from .fields import Field, PrivateAttr, Required +from .main import * +from .networks import * +from .parse import Protocol +from .tools import * +from .types import * +from .version import VERSION, compiled + +__version__ = VERSION + +# WARNING __all__ from .errors is not included here, it will be removed as an export here in v2 +# please use "from pydantic.errors import ..." 
instead +__all__ = [ + # annotated types utils + 'create_model_from_namedtuple', + 'create_model_from_typeddict', + # dataclasses + 'dataclasses', + # class_validators + 'root_validator', + 'validator', + # config + 'BaseConfig', + 'ConfigDict', + 'Extra', + # decorator + 'validate_arguments', + # env_settings + 'BaseSettings', + # error_wrappers + 'ValidationError', + # fields + 'Field', + 'Required', + # main + 'BaseModel', + 'create_model', + 'validate_model', + # network + 'AnyUrl', + 'AnyHttpUrl', + 'FileUrl', + 'HttpUrl', + 'stricturl', + 'EmailStr', + 'NameEmail', + 'IPvAnyAddress', + 'IPvAnyInterface', + 'IPvAnyNetwork', + 'PostgresDsn', + 'CockroachDsn', + 'AmqpDsn', + 'RedisDsn', + 'MongoDsn', + 'KafkaDsn', + 'validate_email', + # parse + 'Protocol', + # tools + 'parse_file_as', + 'parse_obj_as', + 'parse_raw_as', + 'schema_of', + 'schema_json_of', + # types + 'NoneStr', + 'NoneBytes', + 'StrBytes', + 'NoneStrBytes', + 'StrictStr', + 'ConstrainedBytes', + 'conbytes', + 'ConstrainedList', + 'conlist', + 'ConstrainedSet', + 'conset', + 'ConstrainedFrozenSet', + 'confrozenset', + 'ConstrainedStr', + 'constr', + 'PyObject', + 'ConstrainedInt', + 'conint', + 'PositiveInt', + 'NegativeInt', + 'NonNegativeInt', + 'NonPositiveInt', + 'ConstrainedFloat', + 'confloat', + 'PositiveFloat', + 'NegativeFloat', + 'NonNegativeFloat', + 'NonPositiveFloat', + 'FiniteFloat', + 'ConstrainedDecimal', + 'condecimal', + 'ConstrainedDate', + 'condate', + 'UUID1', + 'UUID3', + 'UUID4', + 'UUID5', + 'FilePath', + 'DirectoryPath', + 'Json', + 'JsonWrapper', + 'SecretField', + 'SecretStr', + 'SecretBytes', + 'StrictBool', + 'StrictBytes', + 'StrictInt', + 'StrictFloat', + 'PaymentCardNumber', + 'PrivateAttr', + 'ByteSize', + 'PastDate', + 'FutureDate', + # version + 'compiled', + 'VERSION', +] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/_hypothesis_plugin.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/_hypothesis_plugin.cp37-win_amd64.pyd new file mode 100644 index 00000000..bb873171 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/_hypothesis_plugin.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/_hypothesis_plugin.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/_hypothesis_plugin.py new file mode 100644 index 00000000..a56d2b98 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/_hypothesis_plugin.py @@ -0,0 +1,386 @@ +""" +Register Hypothesis strategies for Pydantic custom types. + +This enables fully-automatic generation of test data for most Pydantic classes. + +Note that this module has *no* runtime impact on Pydantic itself; instead it +is registered as a setuptools entry point and Hypothesis will import it if +Pydantic is installed. See also: + +https://hypothesis.readthedocs.io/en/latest/strategies.html#registering-strategies-via-setuptools-entry-points +https://hypothesis.readthedocs.io/en/latest/data.html#hypothesis.strategies.register_type_strategy +https://hypothesis.readthedocs.io/en/latest/strategies.html#interaction-with-pytest-cov +https://pydantic-docs.helpmanual.io/usage/types/#pydantic-types + +Note that because our motivation is to *improve user experience*, the strategies +are always sound (never generate invalid data) but sacrifice completeness for +maintainability (ie may be unable to generate some tricky but valid data). 
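+
+As a quick illustration (an added note, not upstream text), once Hypothesis has
+loaded this plugin through the entry point described above, a model can be
+fuzzed directly, e.g.::
+
+    from hypothesis import given, strategies as st
+    import pydantic
+
+    class Model(pydantic.BaseModel):
+        uid: pydantic.UUID4
+
+    @given(st.builds(Model))
+    def test_model(m):
+        assert m.uid.version == 4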
+ +Finally, this module makes liberal use of `# type: ignore[]` pragmas. +This is because Hypothesis annotates `register_type_strategy()` with +`(T, SearchStrategy[T])`, but in most cases we register e.g. `ConstrainedInt` +to generate instances of the builtin `int` type which match the constraints. +""" + +import contextlib +import datetime +import ipaddress +import json +import math +from fractions import Fraction +from typing import Callable, Dict, Type, Union, cast, overload + +import hypothesis.strategies as st + +import pydantic +import pydantic.color +import pydantic.types +from pydantic.utils import lenient_issubclass + +# FilePath and DirectoryPath are explicitly unsupported, as we'd have to create +# them on-disk, and that's unsafe in general without being told *where* to do so. +# +# URLs are unsupported because it's easy for users to define their own strategy for +# "normal" URLs, and hard for us to define a general strategy which includes "weird" +# URLs but doesn't also have unpredictable performance problems. +# +# conlist() and conset() are unsupported for now, because the workarounds for +# Cython and Hypothesis to handle parametrized generic types are incompatible. +# Once Cython can support 'normal' generics we'll revisit this. + +# Emails +try: + import email_validator +except ImportError: # pragma: no cover + pass +else: + + def is_valid_email(s: str) -> bool: + # Hypothesis' st.emails() occasionally generates emails like 0@A0--0.ac + # that are invalid according to email-validator, so we filter those out. + try: + email_validator.validate_email(s, check_deliverability=False) + return True + except email_validator.EmailNotValidError: # pragma: no cover + return False + + # Note that these strategies deliberately stay away from any tricky Unicode + # or other encoding issues; we're just trying to generate *something* valid. + st.register_type_strategy(pydantic.EmailStr, st.emails().filter(is_valid_email)) # type: ignore[arg-type] + st.register_type_strategy( + pydantic.NameEmail, + st.builds( + '{} <{}>'.format, # type: ignore[arg-type] + st.from_regex('[A-Za-z0-9_]+( [A-Za-z0-9_]+){0,5}', fullmatch=True), + st.emails().filter(is_valid_email), + ), + ) + +# PyObject - dotted names, in this case taken from the math module. 
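+# Added note (not upstream): the strategy below samples dotted-name *strings*
+# such as 'math.cos'; pydantic's PyObject validator is what later imports the
+# named attribute, so e.g. parse_obj_as(PyObject, 'math.cos') returns math.cos.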
+st.register_type_strategy( + pydantic.PyObject, # type: ignore[arg-type] + st.sampled_from( + [cast(pydantic.PyObject, f'math.{name}') for name in sorted(vars(math)) if not name.startswith('_')] + ), +) + +# CSS3 Colors; as name, hex, rgb(a) tuples or strings, or hsl strings +_color_regexes = ( + '|'.join( + ( + pydantic.color.r_hex_short, + pydantic.color.r_hex_long, + pydantic.color.r_rgb, + pydantic.color.r_rgba, + pydantic.color.r_hsl, + pydantic.color.r_hsla, + ) + ) + # Use more precise regex patterns to avoid value-out-of-range errors + .replace(pydantic.color._r_sl, r'(?:(\d\d?(?:\.\d+)?|100(?:\.0+)?)%)') + .replace(pydantic.color._r_alpha, r'(?:(0(?:\.\d+)?|1(?:\.0+)?|\.\d+|\d{1,2}%))') + .replace(pydantic.color._r_255, r'(?:((?:\d|\d\d|[01]\d\d|2[0-4]\d|25[0-4])(?:\.\d+)?|255(?:\.0+)?))') +) +st.register_type_strategy( + pydantic.color.Color, + st.one_of( + st.sampled_from(sorted(pydantic.color.COLORS_BY_NAME)), + st.tuples( + st.integers(0, 255), + st.integers(0, 255), + st.integers(0, 255), + st.none() | st.floats(0, 1) | st.floats(0, 100).map('{}%'.format), + ), + st.from_regex(_color_regexes, fullmatch=True), + ), +) + + +# Card numbers, valid according to the Luhn algorithm + + +def add_luhn_digit(card_number: str) -> str: + # See https://en.wikipedia.org/wiki/Luhn_algorithm + for digit in '0123456789': + with contextlib.suppress(Exception): + pydantic.PaymentCardNumber.validate_luhn_check_digit(card_number + digit) + return card_number + digit + raise AssertionError('Unreachable') # pragma: no cover + + +card_patterns = ( + # Note that these patterns omit the Luhn check digit; that's added by the function above + '4[0-9]{14}', # Visa + '5[12345][0-9]{13}', # Mastercard + '3[47][0-9]{12}', # American Express + '[0-26-9][0-9]{10,17}', # other (incomplete to avoid overlap) +) +st.register_type_strategy( + pydantic.PaymentCardNumber, + st.from_regex('|'.join(card_patterns), fullmatch=True).map(add_luhn_digit), # type: ignore[arg-type] +) + +# UUIDs +st.register_type_strategy(pydantic.UUID1, st.uuids(version=1)) +st.register_type_strategy(pydantic.UUID3, st.uuids(version=3)) +st.register_type_strategy(pydantic.UUID4, st.uuids(version=4)) +st.register_type_strategy(pydantic.UUID5, st.uuids(version=5)) + +# Secrets +st.register_type_strategy(pydantic.SecretBytes, st.binary().map(pydantic.SecretBytes)) +st.register_type_strategy(pydantic.SecretStr, st.text().map(pydantic.SecretStr)) + +# IP addresses, networks, and interfaces +st.register_type_strategy(pydantic.IPvAnyAddress, st.ip_addresses()) # type: ignore[arg-type] +st.register_type_strategy( + pydantic.IPvAnyInterface, + st.from_type(ipaddress.IPv4Interface) | st.from_type(ipaddress.IPv6Interface), # type: ignore[arg-type] +) +st.register_type_strategy( + pydantic.IPvAnyNetwork, + st.from_type(ipaddress.IPv4Network) | st.from_type(ipaddress.IPv6Network), # type: ignore[arg-type] +) + +# We hook into the con***() functions and the ConstrainedNumberMeta metaclass, +# so here we only have to register subclasses for other constrained types which +# don't go via those mechanisms. Then there are the registration hooks below. +st.register_type_strategy(pydantic.StrictBool, st.booleans()) +st.register_type_strategy(pydantic.StrictStr, st.text()) + + +# Constrained-type resolver functions +# +# For these ones, we actually want to inspect the type in order to work out a +# satisfying strategy. 
First up, the machinery for tracking resolver functions: + +RESOLVERS: Dict[type, Callable[[type], st.SearchStrategy]] = {} # type: ignore[type-arg] + + +@overload +def _registered(typ: Type[pydantic.types.T]) -> Type[pydantic.types.T]: + pass + + +@overload +def _registered(typ: pydantic.types.ConstrainedNumberMeta) -> pydantic.types.ConstrainedNumberMeta: + pass + + +def _registered( + typ: Union[Type[pydantic.types.T], pydantic.types.ConstrainedNumberMeta] +) -> Union[Type[pydantic.types.T], pydantic.types.ConstrainedNumberMeta]: + # This function replaces the version in `pydantic.types`, in order to + # effect the registration of new constrained types so that Hypothesis + # can generate valid examples. + pydantic.types._DEFINED_TYPES.add(typ) + for supertype, resolver in RESOLVERS.items(): + if issubclass(typ, supertype): + st.register_type_strategy(typ, resolver(typ)) # type: ignore + return typ + raise NotImplementedError(f'Unknown type {typ!r} has no resolver to register') # pragma: no cover + + +def resolves( + typ: Union[type, pydantic.types.ConstrainedNumberMeta] +) -> Callable[[Callable[..., st.SearchStrategy]], Callable[..., st.SearchStrategy]]: # type: ignore[type-arg] + def inner(f): # type: ignore + assert f not in RESOLVERS + RESOLVERS[typ] = f + return f + + return inner + + +# Type-to-strategy resolver functions + + +@resolves(pydantic.JsonWrapper) +def resolve_json(cls): # type: ignore[no-untyped-def] + try: + inner = st.none() if cls.inner_type is None else st.from_type(cls.inner_type) + except Exception: # pragma: no cover + finite = st.floats(allow_infinity=False, allow_nan=False) + inner = st.recursive( + base=st.one_of(st.none(), st.booleans(), st.integers(), finite, st.text()), + extend=lambda x: st.lists(x) | st.dictionaries(st.text(), x), # type: ignore + ) + inner_type = getattr(cls, 'inner_type', None) + return st.builds( + cls.inner_type.json if lenient_issubclass(inner_type, pydantic.BaseModel) else json.dumps, + inner, + ensure_ascii=st.booleans(), + indent=st.none() | st.integers(0, 16), + sort_keys=st.booleans(), + ) + + +@resolves(pydantic.ConstrainedBytes) +def resolve_conbytes(cls): # type: ignore[no-untyped-def] # pragma: no cover + min_size = cls.min_length or 0 + max_size = cls.max_length + if not cls.strip_whitespace: + return st.binary(min_size=min_size, max_size=max_size) + # Fun with regex to ensure we neither start nor end with whitespace + repeats = '{{{},{}}}'.format( + min_size - 2 if min_size > 2 else 0, + max_size - 2 if (max_size or 0) > 2 else '', + ) + if min_size >= 2: + pattern = rf'\W.{repeats}\W' + elif min_size == 1: + pattern = rf'\W(.{repeats}\W)?' + else: + assert min_size == 0 + pattern = rf'(\W(.{repeats}\W)?)?' 
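+    # Added worked example (comment only): with min_size=1 and max_size=None the
+    # format call above yields repeats == '{0,}', so the chosen pattern is
+    # r'\W(.{0,}\W)?' -- one non-word char, optionally followed by more text
+    # ending in another non-word char.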
+ return st.from_regex(pattern.encode(), fullmatch=True) + + +@resolves(pydantic.ConstrainedDecimal) +def resolve_condecimal(cls): # type: ignore[no-untyped-def] + min_value = cls.ge + max_value = cls.le + if cls.gt is not None: + assert min_value is None, 'Set `gt` or `ge`, but not both' + min_value = cls.gt + if cls.lt is not None: + assert max_value is None, 'Set `lt` or `le`, but not both' + max_value = cls.lt + s = st.decimals(min_value, max_value, allow_nan=False, places=cls.decimal_places) + if cls.lt is not None: + s = s.filter(lambda d: d < cls.lt) + if cls.gt is not None: + s = s.filter(lambda d: cls.gt < d) + return s + + +@resolves(pydantic.ConstrainedFloat) +def resolve_confloat(cls): # type: ignore[no-untyped-def] + min_value = cls.ge + max_value = cls.le + exclude_min = False + exclude_max = False + + if cls.gt is not None: + assert min_value is None, 'Set `gt` or `ge`, but not both' + min_value = cls.gt + exclude_min = True + if cls.lt is not None: + assert max_value is None, 'Set `lt` or `le`, but not both' + max_value = cls.lt + exclude_max = True + + if cls.multiple_of is None: + return st.floats(min_value, max_value, exclude_min=exclude_min, exclude_max=exclude_max, allow_nan=False) + + if min_value is not None: + min_value = math.ceil(min_value / cls.multiple_of) + if exclude_min: + min_value = min_value + 1 + if max_value is not None: + assert max_value >= cls.multiple_of, 'Cannot build model with max value smaller than multiple of' + max_value = math.floor(max_value / cls.multiple_of) + if exclude_max: + max_value = max_value - 1 + + return st.integers(min_value, max_value).map(lambda x: x * cls.multiple_of) + + +@resolves(pydantic.ConstrainedInt) +def resolve_conint(cls): # type: ignore[no-untyped-def] + min_value = cls.ge + max_value = cls.le + if cls.gt is not None: + assert min_value is None, 'Set `gt` or `ge`, but not both' + min_value = cls.gt + 1 + if cls.lt is not None: + assert max_value is None, 'Set `lt` or `le`, but not both' + max_value = cls.lt - 1 + + if cls.multiple_of is None or cls.multiple_of == 1: + return st.integers(min_value, max_value) + + # These adjustments and the .map handle integer-valued multiples, while the + # .filter handles trickier cases as for confloat. 
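+    # Added worked example (comment only): for conint(ge=5, le=20, multiple_of=3)
+    # this gives min_value = ceil(5/3) = 2 and max_value = floor(20/3) = 6, so
+    # st.integers(2, 6).map(lambda x: x * 3) draws from {6, 9, 12, 15, 18}.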
+ if min_value is not None: + min_value = math.ceil(Fraction(min_value) / Fraction(cls.multiple_of)) + if max_value is not None: + max_value = math.floor(Fraction(max_value) / Fraction(cls.multiple_of)) + return st.integers(min_value, max_value).map(lambda x: x * cls.multiple_of) + + +@resolves(pydantic.ConstrainedDate) +def resolve_condate(cls): # type: ignore[no-untyped-def] + if cls.ge is not None: + assert cls.gt is None, 'Set `gt` or `ge`, but not both' + min_value = cls.ge + elif cls.gt is not None: + min_value = cls.gt + datetime.timedelta(days=1) + else: + min_value = datetime.date.min + if cls.le is not None: + assert cls.lt is None, 'Set `lt` or `le`, but not both' + max_value = cls.le + elif cls.lt is not None: + max_value = cls.lt - datetime.timedelta(days=1) + else: + max_value = datetime.date.max + return st.dates(min_value, max_value) + + +@resolves(pydantic.ConstrainedStr) +def resolve_constr(cls): # type: ignore[no-untyped-def] # pragma: no cover + min_size = cls.min_length or 0 + max_size = cls.max_length + + if cls.regex is None and not cls.strip_whitespace: + return st.text(min_size=min_size, max_size=max_size) + + if cls.regex is not None: + strategy = st.from_regex(cls.regex) + if cls.strip_whitespace: + strategy = strategy.filter(lambda s: s == s.strip()) + elif cls.strip_whitespace: + repeats = '{{{},{}}}'.format( + min_size - 2 if min_size > 2 else 0, + max_size - 2 if (max_size or 0) > 2 else '', + ) + if min_size >= 2: + strategy = st.from_regex(rf'\W.{repeats}\W') + elif min_size == 1: + strategy = st.from_regex(rf'\W(.{repeats}\W)?') + else: + assert min_size == 0 + strategy = st.from_regex(rf'(\W(.{repeats}\W)?)?') + + if min_size == 0 and max_size is None: + return strategy + elif max_size is None: + return strategy.filter(lambda s: min_size <= len(s)) + return strategy.filter(lambda s: min_size <= len(s) <= max_size) + + +# Finally, register all previously-defined types, and patch in our new function +for typ in list(pydantic.types._DEFINED_TYPES): + _registered(typ) +pydantic.types._registered = _registered +st.register_type_strategy(pydantic.Json, resolve_json) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/annotated_types.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/annotated_types.cp37-win_amd64.pyd new file mode 100644 index 00000000..f0f60557 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/annotated_types.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/annotated_types.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/annotated_types.py new file mode 100644 index 00000000..d333457f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/annotated_types.py @@ -0,0 +1,72 @@ +import sys +from typing import TYPE_CHECKING, Any, Dict, FrozenSet, NamedTuple, Type + +from .fields import Required +from .main import BaseModel, create_model +from .typing import is_typeddict, is_typeddict_special + +if TYPE_CHECKING: + from typing_extensions import TypedDict + +if sys.version_info < (3, 11): + + def is_legacy_typeddict(typeddict_cls: Type['TypedDict']) -> bool: # type: ignore[valid-type] + return is_typeddict(typeddict_cls) and type(typeddict_cls).__module__ == 'typing' + +else: + + def is_legacy_typeddict(_: Any) -> Any: + return False + + +def create_model_from_typeddict( + # Mypy bug: `Type[TypedDict]` is resolved as `Any` 
https://github.com/python/mypy/issues/11030 + typeddict_cls: Type['TypedDict'], # type: ignore[valid-type] + **kwargs: Any, +) -> Type['BaseModel']: + """ + Create a `BaseModel` based on the fields of a `TypedDict`. + Since `typing.TypedDict` in Python 3.8 does not store runtime information about optional keys, + we raise an error if this happens (see https://bugs.python.org/issue38834). + """ + field_definitions: Dict[str, Any] + + # Best case scenario: with python 3.9+ or when `TypedDict` is imported from `typing_extensions` + if not hasattr(typeddict_cls, '__required_keys__'): + raise TypeError( + 'You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.9.2. ' + 'Without it, there is no way to differentiate required and optional fields when subclassed.' + ) + + if is_legacy_typeddict(typeddict_cls) and any( + is_typeddict_special(t) for t in typeddict_cls.__annotations__.values() + ): + raise TypeError( + 'You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.11. ' + 'Without it, there is no way to reflect Required/NotRequired keys.' + ) + + required_keys: FrozenSet[str] = typeddict_cls.__required_keys__ # type: ignore[attr-defined] + field_definitions = { + field_name: (field_type, Required if field_name in required_keys else None) + for field_name, field_type in typeddict_cls.__annotations__.items() + } + + return create_model(typeddict_cls.__name__, **kwargs, **field_definitions) + + +def create_model_from_namedtuple(namedtuple_cls: Type['NamedTuple'], **kwargs: Any) -> Type['BaseModel']: + """ + Create a `BaseModel` based on the fields of a named tuple. + A named tuple can be created with `typing.NamedTuple` and declared annotations + but also with `collections.namedtuple`, in this case we consider all fields + to have type `Any`. + """ + # With python 3.10+, `__annotations__` always exists but can be empty hence the `getattr... 
or...` logic + namedtuple_annotations: Dict[str, Type[Any]] = getattr(namedtuple_cls, '__annotations__', None) or { + k: Any for k in namedtuple_cls._fields + } + field_definitions: Dict[str, Any] = { + field_name: (field_type, Required) for field_name, field_type in namedtuple_annotations.items() + } + return create_model(namedtuple_cls.__name__, **kwargs, **field_definitions) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/class_validators.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/class_validators.cp37-win_amd64.pyd new file mode 100644 index 00000000..b51adb28 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/class_validators.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/class_validators.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/class_validators.py new file mode 100644 index 00000000..87190610 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/class_validators.py @@ -0,0 +1,342 @@ +import warnings +from collections import ChainMap +from functools import wraps +from itertools import chain +from types import FunctionType +from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, Union, overload + +from .errors import ConfigError +from .typing import AnyCallable +from .utils import ROOT_KEY, in_ipython + +if TYPE_CHECKING: + from .typing import AnyClassMethod + + +class Validator: + __slots__ = 'func', 'pre', 'each_item', 'always', 'check_fields', 'skip_on_failure' + + def __init__( + self, + func: AnyCallable, + pre: bool = False, + each_item: bool = False, + always: bool = False, + check_fields: bool = False, + skip_on_failure: bool = False, + ): + self.func = func + self.pre = pre + self.each_item = each_item + self.always = always + self.check_fields = check_fields + self.skip_on_failure = skip_on_failure + + +if TYPE_CHECKING: + from inspect import Signature + + from .config import BaseConfig + from .fields import ModelField + from .types import ModelOrDc + + ValidatorCallable = Callable[[Optional[ModelOrDc], Any, Dict[str, Any], ModelField, Type[BaseConfig]], Any] + ValidatorsList = List[ValidatorCallable] + ValidatorListDict = Dict[str, List[Validator]] + +_FUNCS: Set[str] = set() +VALIDATOR_CONFIG_KEY = '__validator_config__' +ROOT_VALIDATOR_CONFIG_KEY = '__root_validator_config__' + + +def validator( + *fields: str, + pre: bool = False, + each_item: bool = False, + always: bool = False, + check_fields: bool = True, + whole: bool = None, + allow_reuse: bool = False, +) -> Callable[[AnyCallable], 'AnyClassMethod']: + """ + Decorate methods on the class indicating that they should be used to validate fields + :param fields: which field(s) the method should be called on + :param pre: whether or not this validator should be called before the standard validators (else after) + :param each_item: for complex objects (sets, lists etc.) 
whether to validate individual elements rather than the
+        whole object
+    :param always: whether this method and other validators should be called even if the value is missing
+    :param check_fields: whether to check that the fields actually exist on the model
+    :param allow_reuse: whether to track and raise an error if another validator refers to the decorated function
+    """
+    if not fields:
+        raise ConfigError('validator with no fields specified')
+    elif isinstance(fields[0], FunctionType):
+        raise ConfigError(
+            "validators should be used with fields and keyword arguments, not bare. "  # noqa: Q000
+            "E.g. usage should be `@validator('<field_name>', ...)`"
+        )
+    elif not all(isinstance(field, str) for field in fields):
+        raise ConfigError(
+            "validator fields should be passed as separate string args. "  # noqa: Q000
+            "E.g. usage should be `@validator('<field_name_1>', '<field_name_2>', ...)`"
+        )
+
+    if whole is not None:
+        warnings.warn(
+            'The "whole" keyword argument is deprecated, use "each_item" (inverse meaning, default False) instead',
+            DeprecationWarning,
+        )
+        assert each_item is False, '"each_item" and "whole" conflict, remove "whole"'
+        each_item = not whole
+
+    def dec(f: AnyCallable) -> 'AnyClassMethod':
+        f_cls = _prepare_validator(f, allow_reuse)
+        setattr(
+            f_cls,
+            VALIDATOR_CONFIG_KEY,
+            (
+                fields,
+                Validator(func=f_cls.__func__, pre=pre, each_item=each_item, always=always, check_fields=check_fields),
+            ),
+        )
+        return f_cls
+
+    return dec
+
+
+@overload
+def root_validator(_func: AnyCallable) -> 'AnyClassMethod':
+    ...
+
+
+@overload
+def root_validator(
+    *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False
+) -> Callable[[AnyCallable], 'AnyClassMethod']:
+    ...
+
+
+def root_validator(
+    _func: Optional[AnyCallable] = None, *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False
+) -> Union['AnyClassMethod', Callable[[AnyCallable], 'AnyClassMethod']]:
+    """
+    Decorate methods on a model indicating that they should be used to validate (and perhaps modify) data either
+    before or after standard model parsing/validation is performed.
+    """
+    if _func:
+        f_cls = _prepare_validator(_func, allow_reuse)
+        setattr(
+            f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure)
+        )
+        return f_cls
+
+    def dec(f: AnyCallable) -> 'AnyClassMethod':
+        f_cls = _prepare_validator(f, allow_reuse)
+        setattr(
+            f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure)
+        )
+        return f_cls
+
+    return dec
+
+
+def _prepare_validator(function: AnyCallable, allow_reuse: bool) -> 'AnyClassMethod':
+    """
+    Avoid validators with duplicated names: without this check, validators can be overwritten silently,
+    which generally isn't the intended behaviour. The check is skipped in ipython (see #312) and when
+    allow_reuse is True.
+    """
+    f_cls = function if isinstance(function, classmethod) else classmethod(function)
+    if not in_ipython() and not allow_reuse:
+        ref = f_cls.__func__.__module__ + '.'
+ f_cls.__func__.__qualname__ + if ref in _FUNCS: + raise ConfigError(f'duplicate validator function "{ref}"; if this is intended, set `allow_reuse=True`') + _FUNCS.add(ref) + return f_cls + + +class ValidatorGroup: + def __init__(self, validators: 'ValidatorListDict') -> None: + self.validators = validators + self.used_validators = {'*'} + + def get_validators(self, name: str) -> Optional[Dict[str, Validator]]: + self.used_validators.add(name) + validators = self.validators.get(name, []) + if name != ROOT_KEY: + validators += self.validators.get('*', []) + if validators: + return {v.func.__name__: v for v in validators} + else: + return None + + def check_for_unused(self) -> None: + unused_validators = set( + chain.from_iterable( + (v.func.__name__ for v in self.validators[f] if v.check_fields) + for f in (self.validators.keys() - self.used_validators) + ) + ) + if unused_validators: + fn = ', '.join(unused_validators) + raise ConfigError( + f"Validators defined with incorrect fields: {fn} " # noqa: Q000 + f"(use check_fields=False if you're inheriting from the model and intended this)" + ) + + +def extract_validators(namespace: Dict[str, Any]) -> Dict[str, List[Validator]]: + validators: Dict[str, List[Validator]] = {} + for var_name, value in namespace.items(): + validator_config = getattr(value, VALIDATOR_CONFIG_KEY, None) + if validator_config: + fields, v = validator_config + for field in fields: + if field in validators: + validators[field].append(v) + else: + validators[field] = [v] + return validators + + +def extract_root_validators(namespace: Dict[str, Any]) -> Tuple[List[AnyCallable], List[Tuple[bool, AnyCallable]]]: + from inspect import signature + + pre_validators: List[AnyCallable] = [] + post_validators: List[Tuple[bool, AnyCallable]] = [] + for name, value in namespace.items(): + validator_config: Optional[Validator] = getattr(value, ROOT_VALIDATOR_CONFIG_KEY, None) + if validator_config: + sig = signature(validator_config.func) + args = list(sig.parameters.keys()) + if args[0] == 'self': + raise ConfigError( + f'Invalid signature for root validator {name}: {sig}, "self" not permitted as first argument, ' + f'should be: (cls, values).' + ) + if len(args) != 2: + raise ConfigError(f'Invalid signature for root validator {name}: {sig}, should be: (cls, values).') + # check function signature + if validator_config.pre: + pre_validators.append(validator_config.func) + else: + post_validators.append((validator_config.skip_on_failure, validator_config.func)) + return pre_validators, post_validators + + +def inherit_validators(base_validators: 'ValidatorListDict', validators: 'ValidatorListDict') -> 'ValidatorListDict': + for field, field_validators in base_validators.items(): + if field not in validators: + validators[field] = [] + validators[field] += field_validators + return validators + + +def make_generic_validator(validator: AnyCallable) -> 'ValidatorCallable': + """ + Make a generic function which calls a validator with the right arguments. + + Unfortunately other approaches (eg. return a partial of a function that builds the arguments) is slow, + hence this laborious way of doing things. + + It's done like this so validators don't all need **kwargs in their signature, eg. any combination of + the arguments "values", "fields" and/or "config" are permitted. 
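+
+    Illustrative example (added note, not upstream text): a validator written as
+    `def check(cls, v, values): ...` is wrapped here into
+    `lambda cls, v, values, field, config: check(cls, v, values=values)`,
+    so the model can always call it with the full argument set.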
+ """ + from inspect import signature + + sig = signature(validator) + args = list(sig.parameters.keys()) + first_arg = args.pop(0) + if first_arg == 'self': + raise ConfigError( + f'Invalid signature for validator {validator}: {sig}, "self" not permitted as first argument, ' + f'should be: (cls, value, values, config, field), "values", "config" and "field" are all optional.' + ) + elif first_arg == 'cls': + # assume the second argument is value + return wraps(validator)(_generic_validator_cls(validator, sig, set(args[1:]))) + else: + # assume the first argument was value which has already been removed + return wraps(validator)(_generic_validator_basic(validator, sig, set(args))) + + +def prep_validators(v_funcs: Iterable[AnyCallable]) -> 'ValidatorsList': + return [make_generic_validator(f) for f in v_funcs if f] + + +all_kwargs = {'values', 'field', 'config'} + + +def _generic_validator_cls(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable': + # assume the first argument is value + has_kwargs = False + if 'kwargs' in args: + has_kwargs = True + args -= {'kwargs'} + + if not args.issubset(all_kwargs): + raise ConfigError( + f'Invalid signature for validator {validator}: {sig}, should be: ' + f'(cls, value, values, config, field), "values", "config" and "field" are all optional.' + ) + + if has_kwargs: + return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config) + elif args == set(): + return lambda cls, v, values, field, config: validator(cls, v) + elif args == {'values'}: + return lambda cls, v, values, field, config: validator(cls, v, values=values) + elif args == {'field'}: + return lambda cls, v, values, field, config: validator(cls, v, field=field) + elif args == {'config'}: + return lambda cls, v, values, field, config: validator(cls, v, config=config) + elif args == {'values', 'field'}: + return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field) + elif args == {'values', 'config'}: + return lambda cls, v, values, field, config: validator(cls, v, values=values, config=config) + elif args == {'field', 'config'}: + return lambda cls, v, values, field, config: validator(cls, v, field=field, config=config) + else: + # args == {'values', 'field', 'config'} + return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config) + + +def _generic_validator_basic(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable': + has_kwargs = False + if 'kwargs' in args: + has_kwargs = True + args -= {'kwargs'} + + if not args.issubset(all_kwargs): + raise ConfigError( + f'Invalid signature for validator {validator}: {sig}, should be: ' + f'(value, values, config, field), "values", "config" and "field" are all optional.' 
+        )
+
+    if has_kwargs:
+        return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config)
+    elif args == set():
+        return lambda cls, v, values, field, config: validator(v)
+    elif args == {'values'}:
+        return lambda cls, v, values, field, config: validator(v, values=values)
+    elif args == {'field'}:
+        return lambda cls, v, values, field, config: validator(v, field=field)
+    elif args == {'config'}:
+        return lambda cls, v, values, field, config: validator(v, config=config)
+    elif args == {'values', 'field'}:
+        return lambda cls, v, values, field, config: validator(v, values=values, field=field)
+    elif args == {'values', 'config'}:
+        return lambda cls, v, values, field, config: validator(v, values=values, config=config)
+    elif args == {'field', 'config'}:
+        return lambda cls, v, values, field, config: validator(v, field=field, config=config)
+    else:
+        # args == {'values', 'field', 'config'}
+        return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config)
+
+
+def gather_all_validators(type_: 'ModelOrDc') -> Dict[str, 'AnyClassMethod']:
+    all_attributes = ChainMap(*[cls.__dict__ for cls in type_.__mro__])  # type: ignore[arg-type,var-annotated]
+    return {
+        k: v
+        for k, v in all_attributes.items()
+        if hasattr(v, VALIDATOR_CONFIG_KEY) or hasattr(v, ROOT_VALIDATOR_CONFIG_KEY)
+    }
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/color.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/color.cp37-win_amd64.pyd
new file mode 100644
index 00000000..b716383f
Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/color.cp37-win_amd64.pyd differ
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/color.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/color.py
new file mode 100644
index 00000000..6fdc9fb1
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/color.py
@@ -0,0 +1,494 @@
+"""
+Color definitions are used as per CSS3 specification:
+http://www.w3.org/TR/css3-color/#svg-color
+
+A few colors have multiple names referring to the same colors, eg. `grey` and `gray` or `aqua` and `cyan`.
+
+In these cases the LAST color when sorted alphabetically takes preference,
+eg. Color((0, 255, 255)).as_named() == 'cyan' because "cyan" comes after "aqua".
+"""
+import math
+import re
+from colorsys import hls_to_rgb, rgb_to_hls
+from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union, cast
+
+from .errors import ColorError
+from .utils import Representation, almost_equal_floats
+
+if TYPE_CHECKING:
+    from .typing import CallableGenerator, ReprArgs
+
+ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]]
+ColorType = Union[ColorTuple, str]
+HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, float]]
+
+
+class RGBA:
+    """
+    Internal use only as a representation of a color.
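+
+    Channel values are stored as floats in the range 0 to 1, with alpha
+    optionally None (added clarifying note).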
+ """ + + __slots__ = 'r', 'g', 'b', 'alpha', '_tuple' + + def __init__(self, r: float, g: float, b: float, alpha: Optional[float]): + self.r = r + self.g = g + self.b = b + self.alpha = alpha + + self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b, alpha) + + def __getitem__(self, item: Any) -> Any: + return self._tuple[item] + + +# these are not compiled here to avoid import slowdown, they'll be compiled the first time they're used, then cached +r_hex_short = r'\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*' +r_hex_long = r'\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*' +_r_255 = r'(\d{1,3}(?:\.\d+)?)' +_r_comma = r'\s*,\s*' +r_rgb = fr'\s*rgb\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}\)\s*' +_r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)' +r_rgba = fr'\s*rgba\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_alpha}\s*\)\s*' +_r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?' +_r_sl = r'(\d{1,3}(?:\.\d+)?)%' +r_hsl = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}\s*\)\s*' +r_hsla = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}{_r_comma}{_r_alpha}\s*\)\s*' + +# colors where the two hex characters are the same, if all colors match this the short version of hex colors can be used +repeat_colors = {int(c * 2, 16) for c in '0123456789abcdef'} +rads = 2 * math.pi + + +class Color(Representation): + __slots__ = '_original', '_rgba' + + def __init__(self, value: ColorType) -> None: + self._rgba: RGBA + self._original: ColorType + if isinstance(value, (tuple, list)): + self._rgba = parse_tuple(value) + elif isinstance(value, str): + self._rgba = parse_str(value) + elif isinstance(value, Color): + self._rgba = value._rgba + value = value._original + else: + raise ColorError(reason='value must be a tuple, list or string') + + # if we've got here value must be a valid color + self._original = value + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='color') + + def original(self) -> ColorType: + """ + Original value passed to Color + """ + return self._original + + def as_named(self, *, fallback: bool = False) -> str: + if self._rgba.alpha is None: + rgb = cast(Tuple[int, int, int], self.as_rgb_tuple()) + try: + return COLORS_BY_VALUE[rgb] + except KeyError as e: + if fallback: + return self.as_hex() + else: + raise ValueError('no named color found, use fallback=True, as_hex() or as_rgb()') from e + else: + return self.as_hex() + + def as_hex(self) -> str: + """ + Hex string representing the color can be 3, 4, 6 or 8 characters depending on whether the string + a "short" representation of the color is possible and whether there's an alpha channel. + """ + values = [float_to_255(c) for c in self._rgba[:3]] + if self._rgba.alpha is not None: + values.append(float_to_255(self._rgba.alpha)) + + as_hex = ''.join(f'{v:02x}' for v in values) + if all(c in repeat_colors for c in values): + as_hex = ''.join(as_hex[c] for c in range(0, len(as_hex), 2)) + return '#' + as_hex + + def as_rgb(self) -> str: + """ + Color as an rgb(, , ) or rgba(, , , ) string. 
+ """ + if self._rgba.alpha is None: + return f'rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})' + else: + return ( + f'rgba({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)}, ' + f'{round(self._alpha_float(), 2)})' + ) + + def as_rgb_tuple(self, *, alpha: Optional[bool] = None) -> ColorTuple: + """ + Color as an RGB or RGBA tuple; red, green and blue are in the range 0 to 255, alpha if included is + in the range 0 to 1. + + :param alpha: whether to include the alpha channel, options are + None - (default) include alpha only if it's set (e.g. not None) + True - always include alpha, + False - always omit alpha, + """ + r, g, b = (float_to_255(c) for c in self._rgba[:3]) + if alpha is None: + if self._rgba.alpha is None: + return r, g, b + else: + return r, g, b, self._alpha_float() + elif alpha: + return r, g, b, self._alpha_float() + else: + # alpha is False + return r, g, b + + def as_hsl(self) -> str: + """ + Color as an hsl(, , ) or hsl(, , , ) string. + """ + if self._rgba.alpha is None: + h, s, li = self.as_hsl_tuple(alpha=False) # type: ignore + return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%})' + else: + h, s, li, a = self.as_hsl_tuple(alpha=True) # type: ignore + return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%}, {round(a, 2)})' + + def as_hsl_tuple(self, *, alpha: Optional[bool] = None) -> HslColorTuple: + """ + Color as an HSL or HSLA tuple, e.g. hue, saturation, lightness and optionally alpha; all elements are in + the range 0 to 1. + + NOTE: this is HSL as used in HTML and most other places, not HLS as used in python's colorsys. + + :param alpha: whether to include the alpha channel, options are + None - (default) include alpha only if it's set (e.g. not None) + True - always include alpha, + False - always omit alpha, + """ + h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b) + if alpha is None: + if self._rgba.alpha is None: + return h, s, l + else: + return h, s, l, self._alpha_float() + if alpha: + return h, s, l, self._alpha_float() + else: + # alpha is False + return h, s, l + + def _alpha_float(self) -> float: + return 1 if self._rgba.alpha is None else self._rgba.alpha + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls + + def __str__(self) -> str: + return self.as_named(fallback=True) + + def __repr_args__(self) -> 'ReprArgs': + return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())] # type: ignore + + def __eq__(self, other: Any) -> bool: + return isinstance(other, Color) and self.as_rgb_tuple() == other.as_rgb_tuple() + + def __hash__(self) -> int: + return hash(self.as_rgb_tuple()) + + +def parse_tuple(value: Tuple[Any, ...]) -> RGBA: + """ + Parse a tuple or list as a color. + """ + if len(value) == 3: + r, g, b = (parse_color_value(v) for v in value) + return RGBA(r, g, b, None) + elif len(value) == 4: + r, g, b = (parse_color_value(v) for v in value[:3]) + return RGBA(r, g, b, parse_float_alpha(value[3])) + else: + raise ColorError(reason='tuples must have length 3 or 4') + + +def parse_str(value: str) -> RGBA: + """ + Parse a string to an RGBA tuple, trying the following formats (in this order): + * named color, see COLORS_BY_NAME below + * hex short eg. `fff` (prefix can be `#`, `0x` or nothing) + * hex long eg. 
`ffffff` (prefix can be `#`, `0x` or nothing)
+    * `rgb(<r>, <g>, <b>)`
+    * `rgba(<r>, <g>, <b>, <a>)`
+    """
+    value_lower = value.lower()
+    try:
+        r, g, b = COLORS_BY_NAME[value_lower]
+    except KeyError:
+        pass
+    else:
+        return ints_to_rgba(r, g, b, None)
+
+    m = re.fullmatch(r_hex_short, value_lower)
+    if m:
+        *rgb, a = m.groups()
+        r, g, b = (int(v * 2, 16) for v in rgb)
+        if a:
+            alpha: Optional[float] = int(a * 2, 16) / 255
+        else:
+            alpha = None
+        return ints_to_rgba(r, g, b, alpha)
+
+    m = re.fullmatch(r_hex_long, value_lower)
+    if m:
+        *rgb, a = m.groups()
+        r, g, b = (int(v, 16) for v in rgb)
+        if a:
+            alpha = int(a, 16) / 255
+        else:
+            alpha = None
+        return ints_to_rgba(r, g, b, alpha)
+
+    m = re.fullmatch(r_rgb, value_lower)
+    if m:
+        return ints_to_rgba(*m.groups(), None)  # type: ignore
+
+    m = re.fullmatch(r_rgba, value_lower)
+    if m:
+        return ints_to_rgba(*m.groups())  # type: ignore
+
+    m = re.fullmatch(r_hsl, value_lower)
+    if m:
+        h, h_units, s, l_ = m.groups()
+        return parse_hsl(h, h_units, s, l_)
+
+    m = re.fullmatch(r_hsla, value_lower)
+    if m:
+        h, h_units, s, l_, a = m.groups()
+        return parse_hsl(h, h_units, s, l_, parse_float_alpha(a))
+
+    raise ColorError(reason='string not recognised as a valid color')
+
+
+def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float]) -> RGBA:
+    return RGBA(parse_color_value(r), parse_color_value(g), parse_color_value(b), parse_float_alpha(alpha))
+
+
+def parse_color_value(value: Union[int, str], max_val: int = 255) -> float:
+    """
+    Parse a value checking it's a valid int in the range 0 to max_val and divide by max_val to give a number
+    in the range 0 to 1
+    """
+    try:
+        color = float(value)
+    except ValueError:
+        raise ColorError(reason='color values must be a valid number')
+    if 0 <= color <= max_val:
+        return color / max_val
+    else:
+        raise ColorError(reason=f'color values must be in the range 0 to {max_val}')
+
+
+def parse_float_alpha(value: Union[None, str, float, int]) -> Optional[float]:
+    """
+    Parse a value checking it's a valid float in the range 0 to 1
+    """
+    if value is None:
+        return None
+    try:
+        if isinstance(value, str) and value.endswith('%'):
+            alpha = float(value[:-1]) / 100
+        else:
+            alpha = float(value)
+    except ValueError:
+        raise ColorError(reason='alpha values must be a valid float')
+
+    if almost_equal_floats(alpha, 1):
+        return None
+    elif 0 <= alpha <= 1:
+        return alpha
+    else:
+        raise ColorError(reason='alpha values must be in the range 0 to 1')
+
+
+def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[float] = None) -> RGBA:
+    """
+    Parse raw hue, saturation, lightness and alpha values and convert to RGBA.
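+
+    For example (added note): parse_hsl('270', 'deg', '50', '50') returns the
+    RGBA equivalent of the CSS color hsl(270, 50%, 50%).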
+ """ + s_value, l_value = parse_color_value(sat, 100), parse_color_value(light, 100) + + h_value = float(h) + if h_units in {None, 'deg'}: + h_value = h_value % 360 / 360 + elif h_units == 'rad': + h_value = h_value % rads / rads + else: + # turns + h_value = h_value % 1 + + r, g, b = hls_to_rgb(h_value, l_value, s_value) + return RGBA(r, g, b, alpha) + + +def float_to_255(c: float) -> int: + return int(round(c * 255)) + + +COLORS_BY_NAME = { + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'aqua': (0, 255, 255), + 'aquamarine': (127, 255, 212), + 'azure': (240, 255, 255), + 'beige': (245, 245, 220), + 'bisque': (255, 228, 196), + 'black': (0, 0, 0), + 'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'burlywood': (222, 184, 135), + 'cadetblue': (95, 158, 160), + 'chartreuse': (127, 255, 0), + 'chocolate': (210, 105, 30), + 'coral': (255, 127, 80), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'darkblue': (0, 0, 139), + 'darkcyan': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgray': (169, 169, 169), + 'darkgreen': (0, 100, 0), + 'darkgrey': (169, 169, 169), + 'darkkhaki': (189, 183, 107), + 'darkmagenta': (139, 0, 139), + 'darkolivegreen': (85, 107, 47), + 'darkorange': (255, 140, 0), + 'darkorchid': (153, 50, 204), + 'darkred': (139, 0, 0), + 'darksalmon': (233, 150, 122), + 'darkseagreen': (143, 188, 143), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 206, 209), + 'darkviolet': (148, 0, 211), + 'deeppink': (255, 20, 147), + 'deepskyblue': (0, 191, 255), + 'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'firebrick': (178, 34, 34), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'fuchsia': (255, 0, 255), + 'gainsboro': (220, 220, 220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'goldenrod': (218, 165, 32), + 'gray': (128, 128, 128), + 'green': (0, 128, 0), + 'greenyellow': (173, 255, 47), + 'grey': (128, 128, 128), + 'honeydew': (240, 255, 240), + 'hotpink': (255, 105, 180), + 'indianred': (205, 92, 92), + 'indigo': (75, 0, 130), + 'ivory': (255, 255, 240), + 'khaki': (240, 230, 140), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 'lawngreen': (124, 252, 0), + 'lemonchiffon': (255, 250, 205), + 'lightblue': (173, 216, 230), + 'lightcoral': (240, 128, 128), + 'lightcyan': (224, 255, 255), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgreen': (144, 238, 144), + 'lightgrey': (211, 211, 211), + 'lightpink': (255, 182, 193), + 'lightsalmon': (255, 160, 122), + 'lightseagreen': (32, 178, 170), + 'lightskyblue': (135, 206, 250), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 'lightyellow': (255, 255, 224), + 'lime': (0, 255, 0), + 'limegreen': (50, 205, 50), + 'linen': (250, 240, 230), + 'magenta': (255, 0, 255), + 'maroon': (128, 0, 0), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumpurple': (147, 112, 219), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': (25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 
225), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navy': (0, 0, 128), + 'oldlace': (253, 245, 230), + 'olive': (128, 128, 0), + 'olivedrab': (107, 142, 35), + 'orange': (255, 165, 0), + 'orangered': (255, 69, 0), + 'orchid': (218, 112, 214), + 'palegoldenrod': (238, 232, 170), + 'palegreen': (152, 251, 152), + 'paleturquoise': (175, 238, 238), + 'palevioletred': (219, 112, 147), + 'papayawhip': (255, 239, 213), + 'peachpuff': (255, 218, 185), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'plum': (221, 160, 221), + 'powderblue': (176, 224, 230), + 'purple': (128, 0, 128), + 'red': (255, 0, 0), + 'rosybrown': (188, 143, 143), + 'royalblue': (65, 105, 225), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'sandybrown': (244, 164, 96), + 'seagreen': (46, 139, 87), + 'seashell': (255, 245, 238), + 'sienna': (160, 82, 45), + 'silver': (192, 192, 192), + 'skyblue': (135, 206, 235), + 'slateblue': (106, 90, 205), + 'slategray': (112, 128, 144), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 'springgreen': (0, 255, 127), + 'steelblue': (70, 130, 180), + 'tan': (210, 180, 140), + 'teal': (0, 128, 128), + 'thistle': (216, 191, 216), + 'tomato': (255, 99, 71), + 'turquoise': (64, 224, 208), + 'violet': (238, 130, 238), + 'wheat': (245, 222, 179), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellowgreen': (154, 205, 50), +} + +COLORS_BY_VALUE = {v: k for k, v in COLORS_BY_NAME.items()} diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/config.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/config.cp37-win_amd64.pyd new file mode 100644 index 00000000..58a31581 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/config.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/config.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/config.py new file mode 100644 index 00000000..74687ca0 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/config.py @@ -0,0 +1,192 @@ +import json +from enum import Enum +from typing import TYPE_CHECKING, Any, Callable, Dict, ForwardRef, Optional, Tuple, Type, Union + +from typing_extensions import Literal, Protocol + +from .typing import AnyArgTCallable, AnyCallable +from .utils import GetterDict +from .version import compiled + +if TYPE_CHECKING: + from typing import overload + + from .fields import ModelField + from .main import BaseModel + + ConfigType = Type['BaseConfig'] + + class SchemaExtraCallable(Protocol): + @overload + def __call__(self, schema: Dict[str, Any]) -> None: + pass + + @overload + def __call__(self, schema: Dict[str, Any], model_class: Type[BaseModel]) -> None: + pass + +else: + SchemaExtraCallable = Callable[..., None] + +__all__ = 'BaseConfig', 'ConfigDict', 'get_config', 'Extra', 'inherit_config', 'prepare_config' + + +class Extra(str, Enum): + allow = 'allow' + ignore = 'ignore' + forbid = 'forbid' + + +# https://github.com/cython/cython/issues/4003 +# Will be fixed with Cython 3 but still in alpha right now +if not compiled: + from typing_extensions import TypedDict + + class ConfigDict(TypedDict, total=False): + title: Optional[str] + anystr_lower: bool + anystr_strip_whitespace: bool + min_anystr_length: int + max_anystr_length: Optional[int] + validate_all: bool + extra: Extra + allow_mutation: bool + frozen: bool + 
allow_population_by_field_name: bool + use_enum_values: bool + fields: Dict[str, Union[str, Dict[str, str]]] + validate_assignment: bool + error_msg_templates: Dict[str, str] + arbitrary_types_allowed: bool + orm_mode: bool + getter_dict: Type[GetterDict] + alias_generator: Optional[Callable[[str], str]] + keep_untouched: Tuple[type, ...] + schema_extra: Union[Dict[str, object], 'SchemaExtraCallable'] + json_loads: Callable[[str], object] + json_dumps: AnyArgTCallable[str] + json_encoders: Dict[Type[object], AnyCallable] + underscore_attrs_are_private: bool + allow_inf_nan: bool + + # whether or not inherited models as fields should be reconstructed as base model + copy_on_model_validation: bool + # whether dataclass `__post_init__` should be run after validation + post_init_call: Literal['before_validation', 'after_validation'] + +else: + ConfigDict = dict # type: ignore + + +class BaseConfig: + title: Optional[str] = None + anystr_lower: bool = False + anystr_upper: bool = False + anystr_strip_whitespace: bool = False + min_anystr_length: int = 0 + max_anystr_length: Optional[int] = None + validate_all: bool = False + extra: Extra = Extra.ignore + allow_mutation: bool = True + frozen: bool = False + allow_population_by_field_name: bool = False + use_enum_values: bool = False + fields: Dict[str, Union[str, Dict[str, str]]] = {} + validate_assignment: bool = False + error_msg_templates: Dict[str, str] = {} + arbitrary_types_allowed: bool = False + orm_mode: bool = False + getter_dict: Type[GetterDict] = GetterDict + alias_generator: Optional[Callable[[str], str]] = None + keep_untouched: Tuple[type, ...] = () + schema_extra: Union[Dict[str, Any], 'SchemaExtraCallable'] = {} + json_loads: Callable[[str], Any] = json.loads + json_dumps: Callable[..., str] = json.dumps + json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable] = {} + underscore_attrs_are_private: bool = False + allow_inf_nan: bool = True + + # whether inherited models as fields should be reconstructed as base model, + # and whether such a copy should be shallow or deep + copy_on_model_validation: Literal['none', 'deep', 'shallow'] = 'shallow' + + # whether `Union` should check all allowed types before even trying to coerce + smart_union: bool = False + # whether dataclass `__post_init__` should be run before or after validation + post_init_call: Literal['before_validation', 'after_validation'] = 'before_validation' + + @classmethod + def get_field_info(cls, name: str) -> Dict[str, Any]: + """ + Get properties of FieldInfo from the `fields` property of the config class. + """ + + fields_value = cls.fields.get(name) + + if isinstance(fields_value, str): + field_info: Dict[str, Any] = {'alias': fields_value} + elif isinstance(fields_value, dict): + field_info = fields_value + else: + field_info = {} + + if 'alias' in field_info: + field_info.setdefault('alias_priority', 2) + + if field_info.get('alias_priority', 0) <= 1 and cls.alias_generator: + alias = cls.alias_generator(name) + if not isinstance(alias, str): + raise TypeError(f'Config.alias_generator must return str, not {alias.__class__}') + field_info.update(alias=alias, alias_priority=1) + return field_info + + @classmethod + def prepare_field(cls, field: 'ModelField') -> None: + """ + Optional hook to check or modify fields during model creation. 
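# ---- editor's note: hedged illustration, not in the patch: how the
# `alias_generator` and `fields` hooks above surface through get_field_info(),
# assuming upstream pydantic 1.x behaviour; `to_camel` is a local helper
# defined here for the example, not the library's.
from pydantic import BaseModel

def to_camel(name: str) -> str:
    first, *rest = name.split('_')
    return first + ''.join(part.title() for part in rest)

class User(BaseModel):
    first_name: str

    class Config:
        alias_generator = to_camel              # applied with alias_priority=1
        allow_population_by_field_name = True   # also accept 'first_name' itself

print(User(firstName='Ada'))                    # first_name='Ada'
print(User.__fields__['first_name'].alias)      # 'firstName'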
+ """ + pass + + +def get_config(config: Union[ConfigDict, Type[object], None]) -> Type[BaseConfig]: + if config is None: + return BaseConfig + + else: + config_dict = ( + config + if isinstance(config, dict) + else {k: getattr(config, k) for k in dir(config) if not k.startswith('__')} + ) + + class Config(BaseConfig): + ... + + for k, v in config_dict.items(): + setattr(Config, k, v) + return Config + + +def inherit_config(self_config: 'ConfigType', parent_config: 'ConfigType', **namespace: Any) -> 'ConfigType': + if not self_config: + base_classes: Tuple['ConfigType', ...] = (parent_config,) + elif self_config == parent_config: + base_classes = (self_config,) + else: + base_classes = self_config, parent_config + + namespace['json_encoders'] = { + **getattr(parent_config, 'json_encoders', {}), + **getattr(self_config, 'json_encoders', {}), + **namespace.get('json_encoders', {}), + } + + return type('Config', base_classes, namespace) + + +def prepare_config(config: Type[BaseConfig], cls_name: str) -> None: + if not isinstance(config.extra, Extra): + try: + config.extra = Extra(config.extra) + except ValueError: + raise ValueError(f'"{cls_name}": {config.extra} is not a valid value for "extra"') diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/dataclasses.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/dataclasses.cp37-win_amd64.pyd new file mode 100644 index 00000000..1cb0f830 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/dataclasses.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/dataclasses.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/dataclasses.py new file mode 100644 index 00000000..856c96d1 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/dataclasses.py @@ -0,0 +1,458 @@ +""" +The main purpose is to enhance stdlib dataclasses by adding validation +A pydantic dataclass can be generated from scratch or from a stdlib one. + +Behind the scene, a pydantic dataclass is just like a regular one on which we attach +a `BaseModel` and magic methods to trigger the validation of the data. +`__init__` and `__post_init__` are hence overridden and have extra logic to be +able to validate input data. + +When a pydantic dataclass is generated from scratch, it's just a plain dataclass +with validation triggered at initialization + +The tricky part if for stdlib dataclasses that are converted after into pydantic ones e.g. + +```py +@dataclasses.dataclass +class M: + x: int + +ValidatedM = pydantic.dataclasses.dataclass(M) +``` + +We indeed still want to support equality, hashing, repr, ... as if it was the stdlib one! + +```py +assert isinstance(ValidatedM(x=1), M) +assert ValidatedM(x=1) == M(x=1) +``` + +This means we **don't want to create a new dataclass that inherits from it** +The trick is to create a wrapper around `M` that will act as a proxy to trigger +validation without altering default `M` behaviour. 
+""" +import sys +from contextlib import contextmanager +from functools import wraps +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, Generator, Optional, Type, TypeVar, Union, overload + +from typing_extensions import dataclass_transform + +from .class_validators import gather_all_validators +from .config import BaseConfig, ConfigDict, Extra, get_config +from .error_wrappers import ValidationError +from .errors import DataclassTypeError +from .fields import Field, FieldInfo, Required, Undefined +from .main import create_model, validate_model +from .utils import ClassAttribute + +if TYPE_CHECKING: + from .main import BaseModel + from .typing import CallableGenerator, NoArgAnyCallable + + DataclassT = TypeVar('DataclassT', bound='Dataclass') + + DataclassClassOrWrapper = Union[Type['Dataclass'], 'DataclassProxy'] + + class Dataclass: + # stdlib attributes + __dataclass_fields__: ClassVar[Dict[str, Any]] + __dataclass_params__: ClassVar[Any] # in reality `dataclasses._DataclassParams` + __post_init__: ClassVar[Callable[..., None]] + + # Added by pydantic + __pydantic_run_validation__: ClassVar[bool] + __post_init_post_parse__: ClassVar[Callable[..., None]] + __pydantic_initialised__: ClassVar[bool] + __pydantic_model__: ClassVar[Type[BaseModel]] + __pydantic_validate_values__: ClassVar[Callable[['Dataclass'], None]] + __pydantic_has_field_info_default__: ClassVar[bool] # whether a `pydantic.Field` is used as default value + + def __init__(self, *args: object, **kwargs: object) -> None: + pass + + @classmethod + def __get_validators__(cls: Type['Dataclass']) -> 'CallableGenerator': + pass + + @classmethod + def __validate__(cls: Type['DataclassT'], v: Any) -> 'DataclassT': + pass + + +__all__ = [ + 'dataclass', + 'set_validation', + 'create_pydantic_model_from_dataclass', + 'is_builtin_dataclass', + 'make_dataclass_validator', +] + +_T = TypeVar('_T') + +if sys.version_info >= (3, 10): + + @dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo)) + @overload + def dataclass( + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + kw_only: bool = ..., + ) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']: + ... + + @dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo)) + @overload + def dataclass( + _cls: Type[_T], + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + kw_only: bool = ..., + ) -> 'DataclassClassOrWrapper': + ... + +else: + + @dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo)) + @overload + def dataclass( + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + ) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']: + ... 
+ + @dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo)) + @overload + def dataclass( + _cls: Type[_T], + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + ) -> 'DataclassClassOrWrapper': + ... + + +@dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo)) +def dataclass( + _cls: Optional[Type[_T]] = None, + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + kw_only: bool = False, +) -> Union[Callable[[Type[_T]], 'DataclassClassOrWrapper'], 'DataclassClassOrWrapper']: + """ + Like the python standard lib dataclasses but with type validation. + The result is either a pydantic dataclass that will validate input data + or a wrapper that will trigger validation around a stdlib dataclass + to avoid modifying it directly + """ + the_config = get_config(config) + + def wrap(cls: Type[Any]) -> 'DataclassClassOrWrapper': + import dataclasses + + if is_builtin_dataclass(cls): + dc_cls_doc = '' + dc_cls = DataclassProxy(cls) + default_validate_on_init = False + else: + dc_cls_doc = cls.__doc__ or '' # needs to be done before generating dataclass + if sys.version_info >= (3, 10): + dc_cls = dataclasses.dataclass( + cls, + init=init, + repr=repr, + eq=eq, + order=order, + unsafe_hash=unsafe_hash, + frozen=frozen, + kw_only=kw_only, + ) + else: + dc_cls = dataclasses.dataclass( # type: ignore + cls, init=init, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen + ) + default_validate_on_init = True + + should_validate_on_init = default_validate_on_init if validate_on_init is None else validate_on_init + _add_pydantic_validation_attributes(cls, the_config, should_validate_on_init, dc_cls_doc) + dc_cls.__pydantic_model__.__try_update_forward_refs__(**{cls.__name__: cls}) + return dc_cls + + if _cls is None: + return wrap + + return wrap(_cls) + + +@contextmanager +def set_validation(cls: Type['DataclassT'], value: bool) -> Generator[Type['DataclassT'], None, None]: + original_run_validation = cls.__pydantic_run_validation__ + try: + cls.__pydantic_run_validation__ = value + yield cls + finally: + cls.__pydantic_run_validation__ = original_run_validation + + +class DataclassProxy: + __slots__ = '__dataclass__' + + def __init__(self, dc_cls: Type['Dataclass']) -> None: + object.__setattr__(self, '__dataclass__', dc_cls) + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + with set_validation(self.__dataclass__, True): + return self.__dataclass__(*args, **kwargs) + + def __getattr__(self, name: str) -> Any: + return getattr(self.__dataclass__, name) + + def __instancecheck__(self, instance: Any) -> bool: + return isinstance(instance, self.__dataclass__) + + +def _add_pydantic_validation_attributes( # noqa: C901 (ignore complexity) + dc_cls: Type['Dataclass'], + config: Type[BaseConfig], + validate_on_init: bool, + dc_cls_doc: str, +) -> None: + """ + We need to replace the right method. 
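# ---- editor's note: hedged sketch, not part of the patch: the DataclassProxy
# behaviour described in the module docstring above, assuming upstream
# pydantic 1.x semantics.
import dataclasses
from pydantic import ValidationError
from pydantic.dataclasses import dataclass as pydantic_dataclass

@dataclasses.dataclass
class M:
    x: int

ValidatedM = pydantic_dataclass(M)       # stdlib class -> DataclassProxy wrapper

assert isinstance(ValidatedM(x=1), M)    # __instancecheck__ delegates to M
assert ValidatedM(x=1) == M(x=1)         # stdlib equality is preserved
try:
    ValidatedM(x='not an int')           # validation now runs on construction
except ValidationError as exc:
    print(exc)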
If no `__post_init__` has been set in the stdlib dataclass + it won't even exist (code is generated on the fly by `dataclasses`) + By default, we run validation after `__init__` or `__post_init__` if defined + """ + init = dc_cls.__init__ + + @wraps(init) + def handle_extra_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: + if config.extra == Extra.ignore: + init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__}) + + elif config.extra == Extra.allow: + for k, v in kwargs.items(): + self.__dict__.setdefault(k, v) + init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__}) + + else: + init(self, *args, **kwargs) + + if hasattr(dc_cls, '__post_init__'): + post_init = dc_cls.__post_init__ + + @wraps(post_init) + def new_post_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: + if config.post_init_call == 'before_validation': + post_init(self, *args, **kwargs) + + if self.__class__.__pydantic_run_validation__: + self.__pydantic_validate_values__() + if hasattr(self, '__post_init_post_parse__'): + self.__post_init_post_parse__(*args, **kwargs) + + if config.post_init_call == 'after_validation': + post_init(self, *args, **kwargs) + + setattr(dc_cls, '__init__', handle_extra_init) + setattr(dc_cls, '__post_init__', new_post_init) + + else: + + @wraps(init) + def new_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: + handle_extra_init(self, *args, **kwargs) + + if self.__class__.__pydantic_run_validation__: + self.__pydantic_validate_values__() + + if hasattr(self, '__post_init_post_parse__'): + # We need to find again the initvars. To do that we use `__dataclass_fields__` instead of + # public method `dataclasses.fields` + import dataclasses + + # get all initvars and their default values + initvars_and_values: Dict[str, Any] = {} + for i, f in enumerate(self.__class__.__dataclass_fields__.values()): + if f._field_type is dataclasses._FIELD_INITVAR: # type: ignore[attr-defined] + try: + # set arg value by default + initvars_and_values[f.name] = args[i] + except IndexError: + initvars_and_values[f.name] = kwargs.get(f.name, f.default) + + self.__post_init_post_parse__(**initvars_and_values) + + setattr(dc_cls, '__init__', new_init) + + setattr(dc_cls, '__pydantic_run_validation__', ClassAttribute('__pydantic_run_validation__', validate_on_init)) + setattr(dc_cls, '__pydantic_initialised__', False) + setattr(dc_cls, '__pydantic_model__', create_pydantic_model_from_dataclass(dc_cls, config, dc_cls_doc)) + setattr(dc_cls, '__pydantic_validate_values__', _dataclass_validate_values) + setattr(dc_cls, '__validate__', classmethod(_validate_dataclass)) + setattr(dc_cls, '__get_validators__', classmethod(_get_validators)) + + if dc_cls.__pydantic_model__.__config__.validate_assignment and not dc_cls.__dataclass_params__.frozen: + setattr(dc_cls, '__setattr__', _dataclass_validate_assignment_setattr) + + +def _get_validators(cls: 'DataclassClassOrWrapper') -> 'CallableGenerator': + yield cls.__validate__ + + +def _validate_dataclass(cls: Type['DataclassT'], v: Any) -> 'DataclassT': + with set_validation(cls, True): + if isinstance(v, cls): + v.__pydantic_validate_values__() + return v + elif isinstance(v, (list, tuple)): + return cls(*v) + elif isinstance(v, dict): + return cls(**v) + else: + raise DataclassTypeError(class_name=cls.__name__) + + +def create_pydantic_model_from_dataclass( + dc_cls: Type['Dataclass'], + config: Type[Any] = BaseConfig, + dc_cls_doc: Optional[str] = None, +) -> Type['BaseModel']: + 
import dataclasses + + field_definitions: Dict[str, Any] = {} + for field in dataclasses.fields(dc_cls): + default: Any = Undefined + default_factory: Optional['NoArgAnyCallable'] = None + field_info: FieldInfo + + if field.default is not dataclasses.MISSING: + default = field.default + elif field.default_factory is not dataclasses.MISSING: + default_factory = field.default_factory + else: + default = Required + + if isinstance(default, FieldInfo): + field_info = default + dc_cls.__pydantic_has_field_info_default__ = True + else: + field_info = Field(default=default, default_factory=default_factory, **field.metadata) + + field_definitions[field.name] = (field.type, field_info) + + validators = gather_all_validators(dc_cls) + model: Type['BaseModel'] = create_model( + dc_cls.__name__, + __config__=config, + __module__=dc_cls.__module__, + __validators__=validators, + __cls_kwargs__={'__resolve_forward_refs__': False}, + **field_definitions, + ) + model.__doc__ = dc_cls_doc if dc_cls_doc is not None else dc_cls.__doc__ or '' + return model + + +def _dataclass_validate_values(self: 'Dataclass') -> None: + # validation errors can occur if this function is called twice on an already initialised dataclass. + # for example if Extra.forbid is enabled, it would consider __pydantic_initialised__ an invalid extra property + if getattr(self, '__pydantic_initialised__'): + return + if getattr(self, '__pydantic_has_field_info_default__', False): + # We need to remove `FieldInfo` values since they are not valid as input + # It's ok to do that because they are obviously the default values! + input_data = {k: v for k, v in self.__dict__.items() if not isinstance(v, FieldInfo)} + else: + input_data = self.__dict__ + d, _, validation_error = validate_model(self.__pydantic_model__, input_data, cls=self.__class__) + if validation_error: + raise validation_error + self.__dict__.update(d) + object.__setattr__(self, '__pydantic_initialised__', True) + + +def _dataclass_validate_assignment_setattr(self: 'Dataclass', name: str, value: Any) -> None: + if self.__pydantic_initialised__: + d = dict(self.__dict__) + d.pop(name, None) + known_field = self.__pydantic_model__.__fields__.get(name, None) + if known_field: + value, error_ = known_field.validate(value, d, loc=name, cls=self.__class__) + if error_: + raise ValidationError([error_], self.__class__) + + object.__setattr__(self, name, value) + + +def is_builtin_dataclass(_cls: Type[Any]) -> bool: + """ + Whether a class is a stdlib dataclass + (useful to discriminate a pydantic dataclass that is actually a wrapper around a stdlib dataclass) + + we check that + - `_cls` is a dataclass + - `_cls` is not a processed pydantic dataclass (with a basemodel attached) + - `_cls` is not a pydantic dataclass inheriting directly from a stdlib dataclass + e.g. + ``` + @dataclasses.dataclass + class A: + x: int + + @pydantic.dataclasses.dataclass + class B(A): + y: int + ``` + In this case, when we first check `B`, we make an extra check and look at the annotations ('y'), + which won't be a superset of all the dataclass fields (only the stdlib fields i.e.
'x') + """ + import dataclasses + + return ( + dataclasses.is_dataclass(_cls) + and not hasattr(_cls, '__pydantic_model__') + and set(_cls.__dataclass_fields__).issuperset(set(getattr(_cls, '__annotations__', {}))) + ) + + +def make_dataclass_validator(dc_cls: Type['Dataclass'], config: Type[BaseConfig]) -> 'CallableGenerator': + """ + Create a pydantic.dataclass from a builtin dataclass to add type validation + and yield the validators + It retrieves the parameters of the dataclass and forwards them to the newly created dataclass + """ + yield from _get_validators(dataclass(dc_cls, config=config, validate_on_init=False)) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/datetime_parse.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/datetime_parse.cp37-win_amd64.pyd new file mode 100644 index 00000000..977183b0 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/datetime_parse.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/datetime_parse.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/datetime_parse.py new file mode 100644 index 00000000..cfd54593 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/datetime_parse.py @@ -0,0 +1,248 @@ +""" +Functions to parse datetime objects. + +We're using regular expressions rather than time.strptime because: +- They provide both validation and parsing. +- They're more flexible for datetimes. +- The date/datetime/time constructors produce friendlier error messages. + +Stolen from https://raw.githubusercontent.com/django/django/main/django/utils/dateparse.py at +9718fa2e8abe430c3526a9278dd976443d4ae3c6 + +Changed to: +* use standard python datetime types not django.utils.timezone +* raise ValueError when regex doesn't match rather than returning None +* support parsing unix timestamps for dates and datetimes +""" +import re +from datetime import date, datetime, time, timedelta, timezone +from typing import Dict, Optional, Type, Union + +from . import errors + +date_expr = r'(?P\d{4})-(?P\d{1,2})-(?P\d{1,2})' +time_expr = ( + r'(?P\d{1,2}):(?P\d{1,2})' + r'(?::(?P\d{1,2})(?:\.(?P\d{1,6})\d{0,6})?)?' + r'(?PZ|[+-]\d{2}(?::?\d{2})?)?$' +) + +date_re = re.compile(f'{date_expr}$') +time_re = re.compile(time_expr) +datetime_re = re.compile(f'{date_expr}[T ]{time_expr}') + +standard_duration_re = re.compile( + r'^' + r'(?:(?P-?\d+) (days?, )?)?' + r'((?:(?P-?\d+):)(?=\d+:\d+))?' + r'(?:(?P-?\d+):)?' + r'(?P-?\d+)' + r'(?:\.(?P\d{1,6})\d{0,6})?' + r'$' +) + +# Support the sections of ISO 8601 date representation that are accepted by timedelta +iso8601_duration_re = re.compile( + r'^(?P[-+]?)' + r'P' + r'(?:(?P\d+(.\d+)?)D)?' + r'(?:T' + r'(?:(?P\d+(.\d+)?)H)?' + r'(?:(?P\d+(.\d+)?)M)?' + r'(?:(?P\d+(.\d+)?)S)?' + r')?' 
+ r'$' +) + +EPOCH = datetime(1970, 1, 1) +# if greater than this, the number is in ms, if less than or equal it's in seconds +# (in seconds this is 11th October 2603, in ms it's 20th August 1970) +MS_WATERSHED = int(2e10) +# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9 +MAX_NUMBER = int(3e20) +StrBytesIntFloat = Union[str, bytes, int, float] + + +def get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]: + if isinstance(value, (int, float)): + return value + try: + return float(value) + except ValueError: + return None + except TypeError: + raise TypeError(f'invalid type; expected {native_expected_type}, string, bytes, int or float') + + +def from_unix_seconds(seconds: Union[int, float]) -> datetime: + if seconds > MAX_NUMBER: + return datetime.max + elif seconds < -MAX_NUMBER: + return datetime.min + + while abs(seconds) > MS_WATERSHED: + seconds /= 1000 + dt = EPOCH + timedelta(seconds=seconds) + return dt.replace(tzinfo=timezone.utc) + + +def _parse_timezone(value: Optional[str], error: Type[Exception]) -> Union[None, int, timezone]: + if value == 'Z': + return timezone.utc + elif value is not None: + offset_mins = int(value[-2:]) if len(value) > 3 else 0 + offset = 60 * int(value[1:3]) + offset_mins + if value[0] == '-': + offset = -offset + try: + return timezone(timedelta(minutes=offset)) + except ValueError: + raise error() + else: + return None + + +def parse_date(value: Union[date, StrBytesIntFloat]) -> date: + """ + Parse a date/int/float/string and return a datetime.date. + + Raise ValueError if the input is well formatted but not a valid date. + Raise ValueError if the input isn't well formatted. + """ + if isinstance(value, date): + if isinstance(value, datetime): + return value.date() + else: + return value + + number = get_numeric(value, 'date') + if number is not None: + return from_unix_seconds(number).date() + + if isinstance(value, bytes): + value = value.decode() + + match = date_re.match(value) # type: ignore + if match is None: + raise errors.DateError() + + kw = {k: int(v) for k, v in match.groupdict().items()} + + try: + return date(**kw) + except ValueError: + raise errors.DateError() + + +def parse_time(value: Union[time, StrBytesIntFloat]) -> time: + """ + Parse a time/string and return a datetime.time. + + Raise ValueError if the input is well formatted but not a valid time. + Raise ValueError if the input isn't well formatted, in particular if it contains an offset. + """ + if isinstance(value, time): + return value + + number = get_numeric(value, 'time') + if number is not None: + if number >= 86400: + # doesn't make sense since the time would loop back around to 0 + raise errors.TimeError() + return (datetime.min + timedelta(seconds=number)).time() + + if isinstance(value, bytes): + value = value.decode() + + match = time_re.match(value) # type: ignore + if match is None: + raise errors.TimeError() + + kw = match.groupdict() + if kw['microsecond']: + kw['microsecond'] = kw['microsecond'].ljust(6, '0') + + tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.TimeError) + kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None} + kw_['tzinfo'] = tzinfo + + try: + return time(**kw_) # type: ignore + except ValueError: + raise errors.TimeError() + + +def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: + """ + Parse a datetime/int/float/string and return a datetime.datetime. + + This function supports time zone offsets.
When the input contains one, + the output uses a timezone with a fixed offset from UTC. + + Raise ValueError if the input is well formatted but not a valid datetime. + Raise ValueError if the input isn't well formatted. + """ + if isinstance(value, datetime): + return value + + number = get_numeric(value, 'datetime') + if number is not None: + return from_unix_seconds(number) + + if isinstance(value, bytes): + value = value.decode() + + match = datetime_re.match(value) # type: ignore + if match is None: + raise errors.DateTimeError() + + kw = match.groupdict() + if kw['microsecond']: + kw['microsecond'] = kw['microsecond'].ljust(6, '0') + + tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.DateTimeError) + kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None} + kw_['tzinfo'] = tzinfo + + try: + return datetime(**kw_) # type: ignore + except ValueError: + raise errors.DateTimeError() + + +def parse_duration(value: StrBytesIntFloat) -> timedelta: + """ + Parse a duration int/float/string and return a datetime.timedelta. + + The preferred format for durations in Django is '%d %H:%M:%S.%f'. + + Also supports ISO 8601 representation. + """ + if isinstance(value, timedelta): + return value + + if isinstance(value, (int, float)): + # below code requires a string + value = f'{value:f}' + elif isinstance(value, bytes): + value = value.decode() + + try: + match = standard_duration_re.match(value) or iso8601_duration_re.match(value) + except TypeError: + raise TypeError('invalid type; expected timedelta, string, bytes, int or float') + + if not match: + raise errors.DurationError() + + kw = match.groupdict() + sign = -1 if kw.pop('sign', '+') == '-' else 1 + if kw.get('microseconds'): + kw['microseconds'] = kw['microseconds'].ljust(6, '0') + + if kw.get('seconds') and kw.get('microseconds') and kw['seconds'].startswith('-'): + kw['microseconds'] = '-' + kw['microseconds'] + + kw_ = {k: float(v) for k, v in kw.items() if v is not None} + + return sign * timedelta(**kw_) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/decorator.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/decorator.cp37-win_amd64.pyd new file mode 100644 index 00000000..39888108 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/decorator.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/decorator.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/decorator.py new file mode 100644 index 00000000..089aab65 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/decorator.py @@ -0,0 +1,264 @@ +from functools import wraps +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Tuple, Type, TypeVar, Union, overload + +from . import validator +from .config import Extra +from .errors import ConfigError +from .main import BaseModel, create_model +from .typing import get_all_type_hints +from .utils import to_camel + +__all__ = ('validate_arguments',) + +if TYPE_CHECKING: + from .typing import AnyCallable + + AnyCallableT = TypeVar('AnyCallableT', bound=AnyCallable) + ConfigType = Union[None, Type[Any], Dict[str, Any]] + + +@overload +def validate_arguments(func: None = None, *, config: 'ConfigType' = None) -> Callable[['AnyCallableT'], 'AnyCallableT']: + ... + + +@overload +def validate_arguments(func: 'AnyCallableT') -> 'AnyCallableT': + ... 
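# ---- editor's note: hedged usage sketch, not part of the patch; assuming the
# upstream pydantic 1.x @validate_arguments decorator implemented below.
from pydantic import ValidationError, validate_arguments

@validate_arguments
def repeat(s: str, count: int) -> str:
    return s * count

print(repeat('ab', 3))       # 'ababab'
print(repeat('ab', '3'))     # 'ababab' -- '3' is coerced to int by the model
try:
    repeat('ab', 'x')        # not coercible -> ValidationError
except ValidationError as exc:
    print(exc)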
+ + +def validate_arguments(func: Optional['AnyCallableT'] = None, *, config: 'ConfigType' = None) -> Any: + """ + Decorator to validate the arguments passed to a function. + """ + + def validate(_func: 'AnyCallable') -> 'AnyCallable': + vd = ValidatedFunction(_func, config) + + @wraps(_func) + def wrapper_function(*args: Any, **kwargs: Any) -> Any: + return vd.call(*args, **kwargs) + + wrapper_function.vd = vd # type: ignore + wrapper_function.validate = vd.init_model_instance # type: ignore + wrapper_function.raw_function = vd.raw_function # type: ignore + wrapper_function.model = vd.model # type: ignore + return wrapper_function + + if func: + return validate(func) + else: + return validate + + +ALT_V_ARGS = 'v__args' +ALT_V_KWARGS = 'v__kwargs' +V_POSITIONAL_ONLY_NAME = 'v__positional_only' +V_DUPLICATE_KWARGS = 'v__duplicate_kwargs' + + +class ValidatedFunction: + def __init__(self, function: 'AnyCallableT', config: 'ConfigType'): # noqa C901 + from inspect import Parameter, signature + + parameters: Mapping[str, Parameter] = signature(function).parameters + + if parameters.keys() & {ALT_V_ARGS, ALT_V_KWARGS, V_POSITIONAL_ONLY_NAME, V_DUPLICATE_KWARGS}: + raise ConfigError( + f'"{ALT_V_ARGS}", "{ALT_V_KWARGS}", "{V_POSITIONAL_ONLY_NAME}" and "{V_DUPLICATE_KWARGS}" ' + f'are not permitted as argument names when using the "{validate_arguments.__name__}" decorator' + ) + + self.raw_function = function + self.arg_mapping: Dict[int, str] = {} + self.positional_only_args = set() + self.v_args_name = 'args' + self.v_kwargs_name = 'kwargs' + + type_hints = get_all_type_hints(function) + takes_args = False + takes_kwargs = False + fields: Dict[str, Tuple[Any, Any]] = {} + for i, (name, p) in enumerate(parameters.items()): + if p.annotation is p.empty: + annotation = Any + else: + annotation = type_hints[name] + + default = ... 
if p.default is p.empty else p.default + if p.kind == Parameter.POSITIONAL_ONLY: + self.arg_mapping[i] = name + fields[name] = annotation, default + fields[V_POSITIONAL_ONLY_NAME] = List[str], None + self.positional_only_args.add(name) + elif p.kind == Parameter.POSITIONAL_OR_KEYWORD: + self.arg_mapping[i] = name + fields[name] = annotation, default + fields[V_DUPLICATE_KWARGS] = List[str], None + elif p.kind == Parameter.KEYWORD_ONLY: + fields[name] = annotation, default + elif p.kind == Parameter.VAR_POSITIONAL: + self.v_args_name = name + fields[name] = Tuple[annotation, ...], None + takes_args = True + else: + assert p.kind == Parameter.VAR_KEYWORD, p.kind + self.v_kwargs_name = name + fields[name] = Dict[str, annotation], None # type: ignore + takes_kwargs = True + + # these checks avoid a clash between "args" and a field with that name + if not takes_args and self.v_args_name in fields: + self.v_args_name = ALT_V_ARGS + + # same with "kwargs" + if not takes_kwargs and self.v_kwargs_name in fields: + self.v_kwargs_name = ALT_V_KWARGS + + if not takes_args: + # we add the field so validation below can raise the correct exception + fields[self.v_args_name] = List[Any], None + + if not takes_kwargs: + # same with kwargs + fields[self.v_kwargs_name] = Dict[Any, Any], None + + self.create_model(fields, takes_args, takes_kwargs, config) + + def init_model_instance(self, *args: Any, **kwargs: Any) -> BaseModel: + values = self.build_values(args, kwargs) + return self.model(**values) + + def call(self, *args: Any, **kwargs: Any) -> Any: + m = self.init_model_instance(*args, **kwargs) + return self.execute(m) + + def build_values(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Dict[str, Any]: + values: Dict[str, Any] = {} + if args: + arg_iter = enumerate(args) + while True: + try: + i, a = next(arg_iter) + except StopIteration: + break + arg_name = self.arg_mapping.get(i) + if arg_name is not None: + values[arg_name] = a + else: + values[self.v_args_name] = [a] + [a for _, a in arg_iter] + break + + var_kwargs: Dict[str, Any] = {} + wrong_positional_args = [] + duplicate_kwargs = [] + fields_alias = [ + field.alias + for name, field in self.model.__fields__.items() + if name not in (self.v_args_name, self.v_kwargs_name) + ] + non_var_fields = set(self.model.__fields__) - {self.v_args_name, self.v_kwargs_name} + for k, v in kwargs.items(): + if k in non_var_fields or k in fields_alias: + if k in self.positional_only_args: + wrong_positional_args.append(k) + if k in values: + duplicate_kwargs.append(k) + values[k] = v + else: + var_kwargs[k] = v + + if var_kwargs: + values[self.v_kwargs_name] = var_kwargs + if wrong_positional_args: + values[V_POSITIONAL_ONLY_NAME] = wrong_positional_args + if duplicate_kwargs: + values[V_DUPLICATE_KWARGS] = duplicate_kwargs + return values + + def execute(self, m: BaseModel) -> Any: + d = {k: v for k, v in m._iter() if k in m.__fields_set__ or m.__fields__[k].default_factory} + var_kwargs = d.pop(self.v_kwargs_name, {}) + + if self.v_args_name in d: + args_: List[Any] = [] + in_kwargs = False + kwargs = {} + for name, value in d.items(): + if in_kwargs: + kwargs[name] = value + elif name == self.v_args_name: + args_ += value + in_kwargs = True + else: + args_.append(value) + return self.raw_function(*args_, **kwargs, **var_kwargs) + elif self.positional_only_args: + args_ = [] + kwargs = {} + for name, value in d.items(): + if name in self.positional_only_args: + args_.append(value) + else: + kwargs[name] = value + return self.raw_function(*args_, 
**kwargs, **var_kwargs) + else: + return self.raw_function(**d, **var_kwargs) + + def create_model(self, fields: Dict[str, Any], takes_args: bool, takes_kwargs: bool, config: 'ConfigType') -> None: + pos_args = len(self.arg_mapping) + + class CustomConfig: + pass + + if not TYPE_CHECKING: # pragma: no branch + if isinstance(config, dict): + CustomConfig = type('Config', (), config) # noqa: F811 + elif config is not None: + CustomConfig = config # noqa: F811 + + if hasattr(CustomConfig, 'fields') or hasattr(CustomConfig, 'alias_generator'): + raise ConfigError( + 'Setting the "fields" and "alias_generator" property on custom Config for ' + '@validate_arguments is not yet supported, please remove.' + ) + + class DecoratorBaseModel(BaseModel): + @validator(self.v_args_name, check_fields=False, allow_reuse=True) + def check_args(cls, v: Optional[List[Any]]) -> Optional[List[Any]]: + if takes_args or v is None: + return v + + raise TypeError(f'{pos_args} positional arguments expected but {pos_args + len(v)} given') + + @validator(self.v_kwargs_name, check_fields=False, allow_reuse=True) + def check_kwargs(cls, v: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]: + if takes_kwargs or v is None: + return v + + plural = '' if len(v) == 1 else 's' + keys = ', '.join(map(repr, v.keys())) + raise TypeError(f'unexpected keyword argument{plural}: {keys}') + + @validator(V_POSITIONAL_ONLY_NAME, check_fields=False, allow_reuse=True) + def check_positional_only(cls, v: Optional[List[str]]) -> None: + if v is None: + return + + plural = '' if len(v) == 1 else 's' + keys = ', '.join(map(repr, v)) + raise TypeError(f'positional-only argument{plural} passed as keyword argument{plural}: {keys}') + + @validator(V_DUPLICATE_KWARGS, check_fields=False, allow_reuse=True) + def check_duplicate_kwargs(cls, v: Optional[List[str]]) -> None: + if v is None: + return + + plural = '' if len(v) == 1 else 's' + keys = ', '.join(map(repr, v)) + raise TypeError(f'multiple values for argument{plural}: {keys}') + + class Config(CustomConfig): + extra = getattr(CustomConfig, 'extra', Extra.forbid) + + self.model = create_model(to_camel(self.raw_function.__name__), __base__=DecoratorBaseModel, **fields) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/env_settings.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/env_settings.cp37-win_amd64.pyd new file mode 100644 index 00000000..acacb246 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/env_settings.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/env_settings.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/env_settings.py new file mode 100644 index 00000000..e9988c01 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/env_settings.py @@ -0,0 +1,346 @@ +import os +import warnings +from pathlib import Path +from typing import AbstractSet, Any, Callable, ClassVar, Dict, List, Mapping, Optional, Tuple, Type, Union + +from .config import BaseConfig, Extra +from .fields import ModelField +from .main import BaseModel +from .typing import StrPath, display_as_type, get_origin, is_union +from .utils import deep_update, path_type, sequence_like + +env_file_sentinel = str(object()) + +SettingsSourceCallable = Callable[['BaseSettings'], Dict[str, Any]] +DotenvType = Union[StrPath, List[StrPath], Tuple[StrPath, ...]] + + +class SettingsError(ValueError): + 
pass + + +class BaseSettings(BaseModel): + """ + Base class for settings, allowing values to be overridden by environment variables. + + This is useful in production for secrets you do not wish to save in code; it plays nicely with docker(-compose), + Heroku and any 12 factor app design. + """ + + def __init__( + __pydantic_self__, + _env_file: Optional[DotenvType] = env_file_sentinel, + _env_file_encoding: Optional[str] = None, + _env_nested_delimiter: Optional[str] = None, + _secrets_dir: Optional[StrPath] = None, + **values: Any, + ) -> None: + # Uses something other than `self` for the first arg to allow "self" as a settable attribute + super().__init__( + **__pydantic_self__._build_values( + values, + _env_file=_env_file, + _env_file_encoding=_env_file_encoding, + _env_nested_delimiter=_env_nested_delimiter, + _secrets_dir=_secrets_dir, + ) + ) + + def _build_values( + self, + init_kwargs: Dict[str, Any], + _env_file: Optional[DotenvType] = None, + _env_file_encoding: Optional[str] = None, + _env_nested_delimiter: Optional[str] = None, + _secrets_dir: Optional[StrPath] = None, + ) -> Dict[str, Any]: + # Configure built-in sources + init_settings = InitSettingsSource(init_kwargs=init_kwargs) + env_settings = EnvSettingsSource( + env_file=(_env_file if _env_file != env_file_sentinel else self.__config__.env_file), + env_file_encoding=( + _env_file_encoding if _env_file_encoding is not None else self.__config__.env_file_encoding + ), + env_nested_delimiter=( + _env_nested_delimiter if _env_nested_delimiter is not None else self.__config__.env_nested_delimiter + ), + env_prefix_len=len(self.__config__.env_prefix), + ) + file_secret_settings = SecretsSettingsSource(secrets_dir=_secrets_dir or self.__config__.secrets_dir) + # Provide a hook to set built-in sources priority and add / remove sources + sources = self.__config__.customise_sources( + init_settings=init_settings, env_settings=env_settings, file_secret_settings=file_secret_settings + ) + if sources: + return deep_update(*reversed([source(self) for source in sources])) + else: + # no one should mean to do this, but I think returning an empty dict is marginally preferable + # to an informative error and much better than a confusing error + return {} + + class Config(BaseConfig): + env_prefix: str = '' + env_file: Optional[DotenvType] = None + env_file_encoding: Optional[str] = None + env_nested_delimiter: Optional[str] = None + secrets_dir: Optional[StrPath] = None + validate_all: bool = True + extra: Extra = Extra.forbid + arbitrary_types_allowed: bool = True + case_sensitive: bool = False + + @classmethod + def prepare_field(cls, field: ModelField) -> None: + env_names: Union[List[str], AbstractSet[str]] + field_info_from_config = cls.get_field_info(field.name) + + env = field_info_from_config.get('env') or field.field_info.extra.get('env') + if env is None: + if field.has_alias: + warnings.warn( + 'aliases are no longer used by BaseSettings to define which environment variables to read. ' + 'Instead use the "env" field setting.
' + 'See https://pydantic-docs.helpmanual.io/usage/settings/#environment-variable-names', + FutureWarning, + ) + env_names = {cls.env_prefix + field.name} + elif isinstance(env, str): + env_names = {env} + elif isinstance(env, (set, frozenset)): + env_names = env + elif sequence_like(env): + env_names = list(env) + else: + raise TypeError(f'invalid field env: {env!r} ({display_as_type(env)}); should be string, list or set') + + if not cls.case_sensitive: + env_names = env_names.__class__(n.lower() for n in env_names) + field.field_info.extra['env_names'] = env_names + + @classmethod + def customise_sources( + cls, + init_settings: SettingsSourceCallable, + env_settings: SettingsSourceCallable, + file_secret_settings: SettingsSourceCallable, + ) -> Tuple[SettingsSourceCallable, ...]: + return init_settings, env_settings, file_secret_settings + + @classmethod + def parse_env_var(cls, field_name: str, raw_val: str) -> Any: + return cls.json_loads(raw_val) + + # populated by the metaclass using the Config class defined above, annotated here to help IDEs only + __config__: ClassVar[Type[Config]] + + +class InitSettingsSource: + __slots__ = ('init_kwargs',) + + def __init__(self, init_kwargs: Dict[str, Any]): + self.init_kwargs = init_kwargs + + def __call__(self, settings: BaseSettings) -> Dict[str, Any]: + return self.init_kwargs + + def __repr__(self) -> str: + return f'InitSettingsSource(init_kwargs={self.init_kwargs!r})' + + +class EnvSettingsSource: + __slots__ = ('env_file', 'env_file_encoding', 'env_nested_delimiter', 'env_prefix_len') + + def __init__( + self, + env_file: Optional[DotenvType], + env_file_encoding: Optional[str], + env_nested_delimiter: Optional[str] = None, + env_prefix_len: int = 0, + ): + self.env_file: Optional[DotenvType] = env_file + self.env_file_encoding: Optional[str] = env_file_encoding + self.env_nested_delimiter: Optional[str] = env_nested_delimiter + self.env_prefix_len: int = env_prefix_len + + def __call__(self, settings: BaseSettings) -> Dict[str, Any]: # noqa C901 + """ + Build environment variables suitable for passing to the Model. 
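# ---- editor's note: hedged illustration, not in the patch: how the source
# above reads the environment, assuming upstream pydantic 1.x BaseSettings.
# A "complex" field (here a List) is decoded from the env var as JSON.
import os
from typing import List
from pydantic import BaseSettings

class AppSettings(BaseSettings):
    hosts: List[str] = []            # complex field -> value parsed as JSON

    class Config:
        env_prefix = 'APP_'          # env name becomes 'app_hosts' (case-insensitive)

os.environ['APP_HOSTS'] = '["a.example", "b.example"]'
print(AppSettings().hosts)           # ['a.example', 'b.example']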
+ """ + d: Dict[str, Any] = {} + + if settings.__config__.case_sensitive: + env_vars: Mapping[str, Optional[str]] = os.environ + else: + env_vars = {k.lower(): v for k, v in os.environ.items()} + + dotenv_vars = self._read_env_files(settings.__config__.case_sensitive) + if dotenv_vars: + env_vars = {**dotenv_vars, **env_vars} + + for field in settings.__fields__.values(): + env_val: Optional[str] = None + for env_name in field.field_info.extra['env_names']: + env_val = env_vars.get(env_name) + if env_val is not None: + break + + is_complex, allow_parse_failure = self.field_is_complex(field) + if is_complex: + if env_val is None: + # field is complex but no value found so far, try explode_env_vars + env_val_built = self.explode_env_vars(field, env_vars) + if env_val_built: + d[field.alias] = env_val_built + else: + # field is complex and there's a value, decode that as JSON, then add explode_env_vars + try: + env_val = settings.__config__.parse_env_var(field.name, env_val) + except ValueError as e: + if not allow_parse_failure: + raise SettingsError(f'error parsing env var "{env_name}"') from e + + if isinstance(env_val, dict): + d[field.alias] = deep_update(env_val, self.explode_env_vars(field, env_vars)) + else: + d[field.alias] = env_val + elif env_val is not None: + # simplest case, field is not complex, we only need to add the value if it was found + d[field.alias] = env_val + + return d + + def _read_env_files(self, case_sensitive: bool) -> Dict[str, Optional[str]]: + env_files = self.env_file + if env_files is None: + return {} + + if isinstance(env_files, (str, os.PathLike)): + env_files = [env_files] + + dotenv_vars = {} + for env_file in env_files: + env_path = Path(env_file).expanduser() + if env_path.is_file(): + dotenv_vars.update( + read_env_file(env_path, encoding=self.env_file_encoding, case_sensitive=case_sensitive) + ) + + return dotenv_vars + + def field_is_complex(self, field: ModelField) -> Tuple[bool, bool]: + """ + Find out if a field is complex, and if so whether JSON errors should be ignored + """ + if field.is_complex(): + allow_parse_failure = False + elif is_union(get_origin(field.type_)) and field.sub_fields and any(f.is_complex() for f in field.sub_fields): + allow_parse_failure = True + else: + return False, False + + return True, allow_parse_failure + + def explode_env_vars(self, field: ModelField, env_vars: Mapping[str, Optional[str]]) -> Dict[str, Any]: + """ + Process env_vars and extract the values of keys containing env_nested_delimiter into nested dictionaries. + + This is applied to a single field, hence filtering by env_var prefix. 
+ """ + prefixes = [f'{env_name}{self.env_nested_delimiter}' for env_name in field.field_info.extra['env_names']] + result: Dict[str, Any] = {} + for env_name, env_val in env_vars.items(): + if not any(env_name.startswith(prefix) for prefix in prefixes): + continue + # we remove the prefix before splitting in case the prefix has characters in common with the delimiter + env_name_without_prefix = env_name[self.env_prefix_len :] + _, *keys, last_key = env_name_without_prefix.split(self.env_nested_delimiter) + env_var = result + for key in keys: + env_var = env_var.setdefault(key, {}) + env_var[last_key] = env_val + + return result + + def __repr__(self) -> str: + return ( + f'EnvSettingsSource(env_file={self.env_file!r}, env_file_encoding={self.env_file_encoding!r}, ' + f'env_nested_delimiter={self.env_nested_delimiter!r})' + ) + + +class SecretsSettingsSource: + __slots__ = ('secrets_dir',) + + def __init__(self, secrets_dir: Optional[StrPath]): + self.secrets_dir: Optional[StrPath] = secrets_dir + + def __call__(self, settings: BaseSettings) -> Dict[str, Any]: + """ + Build fields from "secrets" files. + """ + secrets: Dict[str, Optional[str]] = {} + + if self.secrets_dir is None: + return secrets + + secrets_path = Path(self.secrets_dir).expanduser() + + if not secrets_path.exists(): + warnings.warn(f'directory "{secrets_path}" does not exist') + return secrets + + if not secrets_path.is_dir(): + raise SettingsError(f'secrets_dir must reference a directory, not a {path_type(secrets_path)}') + + for field in settings.__fields__.values(): + for env_name in field.field_info.extra['env_names']: + path = find_case_path(secrets_path, env_name, settings.__config__.case_sensitive) + if not path: + # path does not exist, we curently don't return a warning for this + continue + + if path.is_file(): + secret_value = path.read_text().strip() + if field.is_complex(): + try: + secret_value = settings.__config__.parse_env_var(field.name, secret_value) + except ValueError as e: + raise SettingsError(f'error parsing env var "{env_name}"') from e + + secrets[field.alias] = secret_value + else: + warnings.warn( + f'attempted to load secret file "{path}" but found a {path_type(path)} instead.', + stacklevel=4, + ) + return secrets + + def __repr__(self) -> str: + return f'SecretsSettingsSource(secrets_dir={self.secrets_dir!r})' + + +def read_env_file( + file_path: StrPath, *, encoding: str = None, case_sensitive: bool = False +) -> Dict[str, Optional[str]]: + try: + from dotenv import dotenv_values + except ImportError as e: + raise ImportError('python-dotenv is not installed, run `pip install pydantic[dotenv]`') from e + + file_vars: Dict[str, Optional[str]] = dotenv_values(file_path, encoding=encoding or 'utf8') + if not case_sensitive: + return {k.lower(): v for k, v in file_vars.items()} + else: + return file_vars + + +def find_case_path(dir_path: Path, file_name: str, case_sensitive: bool) -> Optional[Path]: + """ + Find a file within path's directory matching filename, optionally ignoring case. 
+ """ + for f in dir_path.iterdir(): + if f.name == file_name: + return f + elif not case_sensitive and f.name.lower() == file_name.lower(): + return f + return None diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/error_wrappers.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/error_wrappers.cp37-win_amd64.pyd new file mode 100644 index 00000000..1bcd4e87 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/error_wrappers.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/error_wrappers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/error_wrappers.py new file mode 100644 index 00000000..5d3204f4 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/error_wrappers.py @@ -0,0 +1,162 @@ +import json +from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Sequence, Tuple, Type, Union + +from .json import pydantic_encoder +from .utils import Representation + +if TYPE_CHECKING: + from typing_extensions import TypedDict + + from .config import BaseConfig + from .types import ModelOrDc + from .typing import ReprArgs + + Loc = Tuple[Union[int, str], ...] + + class _ErrorDictRequired(TypedDict): + loc: Loc + msg: str + type: str + + class ErrorDict(_ErrorDictRequired, total=False): + ctx: Dict[str, Any] + + +__all__ = 'ErrorWrapper', 'ValidationError' + + +class ErrorWrapper(Representation): + __slots__ = 'exc', '_loc' + + def __init__(self, exc: Exception, loc: Union[str, 'Loc']) -> None: + self.exc = exc + self._loc = loc + + def loc_tuple(self) -> 'Loc': + if isinstance(self._loc, tuple): + return self._loc + else: + return (self._loc,) + + def __repr_args__(self) -> 'ReprArgs': + return [('exc', self.exc), ('loc', self.loc_tuple())] + + +# ErrorList is something like Union[List[Union[List[ErrorWrapper], ErrorWrapper]], ErrorWrapper] +# but recursive, therefore just use: +ErrorList = Union[Sequence[Any], ErrorWrapper] + + +class ValidationError(Representation, ValueError): + __slots__ = 'raw_errors', 'model', '_error_cache' + + def __init__(self, errors: Sequence[ErrorList], model: 'ModelOrDc') -> None: + self.raw_errors = errors + self.model = model + self._error_cache: Optional[List['ErrorDict']] = None + + def errors(self) -> List['ErrorDict']: + if self._error_cache is None: + try: + config = self.model.__config__ # type: ignore + except AttributeError: + config = self.model.__pydantic_model__.__config__ # type: ignore + self._error_cache = list(flatten_errors(self.raw_errors, config)) + return self._error_cache + + def json(self, *, indent: Union[None, int, str] = 2) -> str: + return json.dumps(self.errors(), indent=indent, default=pydantic_encoder) + + def __str__(self) -> str: + errors = self.errors() + no_errors = len(errors) + return ( + f'{no_errors} validation error{"" if no_errors == 1 else "s"} for {self.model.__name__}\n' + f'{display_errors(errors)}' + ) + + def __repr_args__(self) -> 'ReprArgs': + return [('model', self.model.__name__), ('errors', self.errors())] + + +def display_errors(errors: List['ErrorDict']) -> str: + return '\n'.join(f'{_display_error_loc(e)}\n {e["msg"]} ({_display_error_type_and_ctx(e)})' for e in errors) + + +def _display_error_loc(error: 'ErrorDict') -> str: + return ' -> '.join(str(e) for e in error['loc']) + + +def _display_error_type_and_ctx(error: 'ErrorDict') -> str: + t = 'type=' + error['type'] + ctx = 
error.get('ctx') + if ctx: + return t + ''.join(f'; {k}={v}' for k, v in ctx.items()) + else: + return t + + +def flatten_errors( + errors: Sequence[Any], config: Type['BaseConfig'], loc: Optional['Loc'] = None +) -> Generator['ErrorDict', None, None]: + for error in errors: + if isinstance(error, ErrorWrapper): + + if loc: + error_loc = loc + error.loc_tuple() + else: + error_loc = error.loc_tuple() + + if isinstance(error.exc, ValidationError): + yield from flatten_errors(error.exc.raw_errors, config, error_loc) + else: + yield error_dict(error.exc, config, error_loc) + elif isinstance(error, list): + yield from flatten_errors(error, config, loc=loc) + else: + raise RuntimeError(f'Unknown error object: {error}') + + +def error_dict(exc: Exception, config: Type['BaseConfig'], loc: 'Loc') -> 'ErrorDict': + type_ = get_exc_type(exc.__class__) + msg_template = config.error_msg_templates.get(type_) or getattr(exc, 'msg_template', None) + ctx = exc.__dict__ + if msg_template: + msg = msg_template.format(**ctx) + else: + msg = str(exc) + + d: 'ErrorDict' = {'loc': loc, 'msg': msg, 'type': type_} + + if ctx: + d['ctx'] = ctx + + return d + + +_EXC_TYPE_CACHE: Dict[Type[Exception], str] = {} + + +def get_exc_type(cls: Type[Exception]) -> str: + # slightly more efficient than using lru_cache since we don't need to worry about the cache filling up + try: + return _EXC_TYPE_CACHE[cls] + except KeyError: + r = _get_exc_type(cls) + _EXC_TYPE_CACHE[cls] = r + return r + + +def _get_exc_type(cls: Type[Exception]) -> str: + if issubclass(cls, AssertionError): + return 'assertion_error' + + base_name = 'type_error' if issubclass(cls, TypeError) else 'value_error' + if cls in (TypeError, ValueError): + # just TypeError or ValueError, no extra code + return base_name + + # if it's not a TypeError or ValueError, we just take the lowercase of the exception name + # no chaining or snake case logic, use "code" for more complex error types. + code = getattr(cls, 'code', None) or cls.__name__.replace('Error', '').lower() + return base_name + '.' + code diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/errors.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/errors.cp37-win_amd64.pyd new file mode 100644 index 00000000..387cfaf0 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/errors.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/errors.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/errors.py new file mode 100644 index 00000000..7bdafdd1 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/errors.py @@ -0,0 +1,646 @@ +from decimal import Decimal +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, Tuple, Type, Union + +from .typing import display_as_type + +if TYPE_CHECKING: + from .typing import DictStrAny + +# explicitly state exports to avoid "from .errors import *" also importing Decimal, Path etc. 
+__all__ = ( + 'PydanticTypeError', + 'PydanticValueError', + 'ConfigError', + 'MissingError', + 'ExtraError', + 'NoneIsNotAllowedError', + 'NoneIsAllowedError', + 'WrongConstantError', + 'NotNoneError', + 'BoolError', + 'BytesError', + 'DictError', + 'EmailError', + 'UrlError', + 'UrlSchemeError', + 'UrlSchemePermittedError', + 'UrlUserInfoError', + 'UrlHostError', + 'UrlHostTldError', + 'UrlPortError', + 'UrlExtraError', + 'EnumError', + 'IntEnumError', + 'EnumMemberError', + 'IntegerError', + 'FloatError', + 'PathError', + 'PathNotExistsError', + 'PathNotAFileError', + 'PathNotADirectoryError', + 'PyObjectError', + 'SequenceError', + 'ListError', + 'SetError', + 'FrozenSetError', + 'TupleError', + 'TupleLengthError', + 'ListMinLengthError', + 'ListMaxLengthError', + 'ListUniqueItemsError', + 'SetMinLengthError', + 'SetMaxLengthError', + 'FrozenSetMinLengthError', + 'FrozenSetMaxLengthError', + 'AnyStrMinLengthError', + 'AnyStrMaxLengthError', + 'StrError', + 'StrRegexError', + 'NumberNotGtError', + 'NumberNotGeError', + 'NumberNotLtError', + 'NumberNotLeError', + 'NumberNotMultipleError', + 'DecimalError', + 'DecimalIsNotFiniteError', + 'DecimalMaxDigitsError', + 'DecimalMaxPlacesError', + 'DecimalWholeDigitsError', + 'DateTimeError', + 'DateError', + 'DateNotInThePastError', + 'DateNotInTheFutureError', + 'TimeError', + 'DurationError', + 'HashableError', + 'UUIDError', + 'UUIDVersionError', + 'ArbitraryTypeError', + 'ClassError', + 'SubclassError', + 'JsonError', + 'JsonTypeError', + 'PatternError', + 'DataclassTypeError', + 'CallableError', + 'IPvAnyAddressError', + 'IPvAnyInterfaceError', + 'IPvAnyNetworkError', + 'IPv4AddressError', + 'IPv6AddressError', + 'IPv4NetworkError', + 'IPv6NetworkError', + 'IPv4InterfaceError', + 'IPv6InterfaceError', + 'ColorError', + 'StrictBoolError', + 'NotDigitError', + 'LuhnValidationError', + 'InvalidLengthForBrand', + 'InvalidByteSize', + 'InvalidByteSizeUnit', + 'MissingDiscriminator', + 'InvalidDiscriminator', +) + + +def cls_kwargs(cls: Type['PydanticErrorMixin'], ctx: 'DictStrAny') -> 'PydanticErrorMixin': + """ + For built-in exceptions like ValueError or TypeError, we need to implement + __reduce__ to override the default behaviour (instead of __getstate__/__setstate__) + By default pickle protocol 2 calls `cls.__new__(cls, *args)`. + Since we only use kwargs, we need a little constructor to change that. 
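+
+    A sketch of the round-trip this enables (using an error type defined
+    further down in this module)::
+
+        import pickle
+
+        err = UUIDVersionError(required_version=4)
+        err2 = pickle.loads(pickle.dumps(err))  # pickle calls cls_kwargs(UUIDVersionError, ctx)
+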
+ Note: the callable can't be a lambda as pickle looks in the namespace to find it + """ + return cls(**ctx) + + +class PydanticErrorMixin: + code: str + msg_template: str + + def __init__(self, **ctx: Any) -> None: + self.__dict__ = ctx + + def __str__(self) -> str: + return self.msg_template.format(**self.__dict__) + + def __reduce__(self) -> Tuple[Callable[..., 'PydanticErrorMixin'], Tuple[Type['PydanticErrorMixin'], 'DictStrAny']]: + return cls_kwargs, (self.__class__, self.__dict__) + + +class PydanticTypeError(PydanticErrorMixin, TypeError): + pass + + +class PydanticValueError(PydanticErrorMixin, ValueError): + pass + + +class ConfigError(RuntimeError): + pass + + +class MissingError(PydanticValueError): + msg_template = 'field required' + + +class ExtraError(PydanticValueError): + msg_template = 'extra fields not permitted' + + +class NoneIsNotAllowedError(PydanticTypeError): + code = 'none.not_allowed' + msg_template = 'none is not an allowed value' + + +class NoneIsAllowedError(PydanticTypeError): + code = 'none.allowed' + msg_template = 'value is not none' + + +class WrongConstantError(PydanticValueError): + code = 'const' + + def __str__(self) -> str: + permitted = ', '.join(repr(v) for v in self.permitted) # type: ignore + return f'unexpected value; permitted: {permitted}' + + +class NotNoneError(PydanticTypeError): + code = 'not_none' + msg_template = 'value is not None' + + +class BoolError(PydanticTypeError): + msg_template = 'value could not be parsed to a boolean' + + +class BytesError(PydanticTypeError): + msg_template = 'byte type expected' + + +class DictError(PydanticTypeError): + msg_template = 'value is not a valid dict' + + +class EmailError(PydanticValueError): + msg_template = 'value is not a valid email address' + + +class UrlError(PydanticValueError): + code = 'url' + + +class UrlSchemeError(UrlError): + code = 'url.scheme' + msg_template = 'invalid or missing URL scheme' + + +class UrlSchemePermittedError(UrlError): + code = 'url.scheme' + msg_template = 'URL scheme not permitted' + + def __init__(self, allowed_schemes: Set[str]): + super().__init__(allowed_schemes=allowed_schemes) + + +class UrlUserInfoError(UrlError): + code = 'url.userinfo' + msg_template = 'userinfo required in URL but missing' + + +class UrlHostError(UrlError): + code = 'url.host' + msg_template = 'URL host invalid' + + +class UrlHostTldError(UrlError): + code = 'url.host' + msg_template = 'URL host invalid, top level domain required' + + +class UrlPortError(UrlError): + code = 'url.port' + msg_template = 'URL port invalid, port cannot exceed 65535' + + +class UrlExtraError(UrlError): + code = 'url.extra' + msg_template = 'URL invalid, extra characters found after valid URL: {extra!r}' + + +class EnumMemberError(PydanticTypeError): + code = 'enum' + + def __str__(self) -> str: + permitted = ', '.join(repr(v.value) for v in self.enum_values) # type: ignore + return f'value is not a valid enumeration member; permitted: {permitted}' + + +class IntegerError(PydanticTypeError): + msg_template = 'value is not a valid integer' + + +class FloatError(PydanticTypeError): + msg_template = 'value is not a valid float' + + +class PathError(PydanticTypeError): + msg_template = 'value is not a valid path' + + +class _PathValueError(PydanticValueError): + def __init__(self, *, path: Path) -> None: + super().__init__(path=str(path)) + + +class PathNotExistsError(_PathValueError): + code = 'path.not_exists' + msg_template = 'file or directory at path "{path}" does not exist' + + +class 
PathNotAFileError(_PathValueError): + code = 'path.not_a_file' + msg_template = 'path "{path}" does not point to a file' + + +class PathNotADirectoryError(_PathValueError): + code = 'path.not_a_directory' + msg_template = 'path "{path}" does not point to a directory' + + +class PyObjectError(PydanticTypeError): + msg_template = 'ensure this value contains valid import path or valid callable: {error_message}' + + +class SequenceError(PydanticTypeError): + msg_template = 'value is not a valid sequence' + + +class IterableError(PydanticTypeError): + msg_template = 'value is not a valid iterable' + + +class ListError(PydanticTypeError): + msg_template = 'value is not a valid list' + + +class SetError(PydanticTypeError): + msg_template = 'value is not a valid set' + + +class FrozenSetError(PydanticTypeError): + msg_template = 'value is not a valid frozenset' + + +class DequeError(PydanticTypeError): + msg_template = 'value is not a valid deque' + + +class TupleError(PydanticTypeError): + msg_template = 'value is not a valid tuple' + + +class TupleLengthError(PydanticValueError): + code = 'tuple.length' + msg_template = 'wrong tuple length {actual_length}, expected {expected_length}' + + def __init__(self, *, actual_length: int, expected_length: int) -> None: + super().__init__(actual_length=actual_length, expected_length=expected_length) + + +class ListMinLengthError(PydanticValueError): + code = 'list.min_items' + msg_template = 'ensure this value has at least {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class ListMaxLengthError(PydanticValueError): + code = 'list.max_items' + msg_template = 'ensure this value has at most {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class ListUniqueItemsError(PydanticValueError): + code = 'list.unique_items' + msg_template = 'the list has duplicated items' + + +class SetMinLengthError(PydanticValueError): + code = 'set.min_items' + msg_template = 'ensure this value has at least {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class SetMaxLengthError(PydanticValueError): + code = 'set.max_items' + msg_template = 'ensure this value has at most {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class FrozenSetMinLengthError(PydanticValueError): + code = 'frozenset.min_items' + msg_template = 'ensure this value has at least {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class FrozenSetMaxLengthError(PydanticValueError): + code = 'frozenset.max_items' + msg_template = 'ensure this value has at most {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class AnyStrMinLengthError(PydanticValueError): + code = 'any_str.min_length' + msg_template = 'ensure this value has at least {limit_value} characters' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class AnyStrMaxLengthError(PydanticValueError): + code = 'any_str.max_length' + msg_template = 'ensure this value has at most {limit_value} characters' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class StrError(PydanticTypeError): + msg_template = 'str type expected' + + 
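+# A hedged illustration (not part of pydantic itself): new error types follow
+# the same pattern as the classes above -- subclass PydanticValueError (or
+# PydanticTypeError) and set `code` and `msg_template`, e.g.:
+#
+#     class NotLowercaseError(PydanticValueError):
+#         code = 'not_lowercase'
+#         msg_template = 'value "{value}" is not lowercase'
+#
+# The kwargs passed to the constructor are stored on the instance and used to
+# format `msg_template`; `code` becomes the error "type" in ValidationError.errors().
+
+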
+class StrRegexError(PydanticValueError): + code = 'str.regex' + msg_template = 'string does not match regex "{pattern}"' + + def __init__(self, *, pattern: str) -> None: + super().__init__(pattern=pattern) + + +class _NumberBoundError(PydanticValueError): + def __init__(self, *, limit_value: Union[int, float, Decimal]) -> None: + super().__init__(limit_value=limit_value) + + +class NumberNotGtError(_NumberBoundError): + code = 'number.not_gt' + msg_template = 'ensure this value is greater than {limit_value}' + + +class NumberNotGeError(_NumberBoundError): + code = 'number.not_ge' + msg_template = 'ensure this value is greater than or equal to {limit_value}' + + +class NumberNotLtError(_NumberBoundError): + code = 'number.not_lt' + msg_template = 'ensure this value is less than {limit_value}' + + +class NumberNotLeError(_NumberBoundError): + code = 'number.not_le' + msg_template = 'ensure this value is less than or equal to {limit_value}' + + +class NumberNotFiniteError(PydanticValueError): + code = 'number.not_finite_number' + msg_template = 'ensure this value is a finite number' + + +class NumberNotMultipleError(PydanticValueError): + code = 'number.not_multiple' + msg_template = 'ensure this value is a multiple of {multiple_of}' + + def __init__(self, *, multiple_of: Union[int, float, Decimal]) -> None: + super().__init__(multiple_of=multiple_of) + + +class DecimalError(PydanticTypeError): + msg_template = 'value is not a valid decimal' + + +class DecimalIsNotFiniteError(PydanticValueError): + code = 'decimal.not_finite' + msg_template = 'value is not a valid decimal' + + +class DecimalMaxDigitsError(PydanticValueError): + code = 'decimal.max_digits' + msg_template = 'ensure that there are no more than {max_digits} digits in total' + + def __init__(self, *, max_digits: int) -> None: + super().__init__(max_digits=max_digits) + + +class DecimalMaxPlacesError(PydanticValueError): + code = 'decimal.max_places' + msg_template = 'ensure that there are no more than {decimal_places} decimal places' + + def __init__(self, *, decimal_places: int) -> None: + super().__init__(decimal_places=decimal_places) + + +class DecimalWholeDigitsError(PydanticValueError): + code = 'decimal.whole_digits' + msg_template = 'ensure that there are no more than {whole_digits} digits before the decimal point' + + def __init__(self, *, whole_digits: int) -> None: + super().__init__(whole_digits=whole_digits) + + +class DateTimeError(PydanticValueError): + msg_template = 'invalid datetime format' + + +class DateError(PydanticValueError): + msg_template = 'invalid date format' + + +class DateNotInThePastError(PydanticValueError): + code = 'date.not_in_the_past' + msg_template = 'date is not in the past' + + +class DateNotInTheFutureError(PydanticValueError): + code = 'date.not_in_the_future' + msg_template = 'date is not in the future' + + +class TimeError(PydanticValueError): + msg_template = 'invalid time format' + + +class DurationError(PydanticValueError): + msg_template = 'invalid duration format' + + +class HashableError(PydanticTypeError): + msg_template = 'value is not a valid hashable' + + +class UUIDError(PydanticTypeError): + msg_template = 'value is not a valid uuid' + + +class UUIDVersionError(PydanticValueError): + code = 'uuid.version' + msg_template = 'uuid version {required_version} expected' + + def __init__(self, *, required_version: int) -> None: + super().__init__(required_version=required_version) + + +class ArbitraryTypeError(PydanticTypeError): + code = 'arbitrary_type' + msg_template = 
'instance of {expected_arbitrary_type} expected' + + def __init__(self, *, expected_arbitrary_type: Type[Any]) -> None: + super().__init__(expected_arbitrary_type=display_as_type(expected_arbitrary_type)) + + +class ClassError(PydanticTypeError): + code = 'class' + msg_template = 'a class is expected' + + +class SubclassError(PydanticTypeError): + code = 'subclass' + msg_template = 'subclass of {expected_class} expected' + + def __init__(self, *, expected_class: Type[Any]) -> None: + super().__init__(expected_class=display_as_type(expected_class)) + + +class JsonError(PydanticValueError): + msg_template = 'Invalid JSON' + + +class JsonTypeError(PydanticTypeError): + code = 'json' + msg_template = 'JSON object must be str, bytes or bytearray' + + +class PatternError(PydanticValueError): + code = 'regex_pattern' + msg_template = 'Invalid regular expression' + + +class DataclassTypeError(PydanticTypeError): + code = 'dataclass' + msg_template = 'instance of {class_name}, tuple or dict expected' + + +class CallableError(PydanticTypeError): + msg_template = '{value} is not callable' + + +class EnumError(PydanticTypeError): + code = 'enum_instance' + msg_template = '{value} is not a valid Enum instance' + + +class IntEnumError(PydanticTypeError): + code = 'int_enum_instance' + msg_template = '{value} is not a valid IntEnum instance' + + +class IPvAnyAddressError(PydanticValueError): + msg_template = 'value is not a valid IPv4 or IPv6 address' + + +class IPvAnyInterfaceError(PydanticValueError): + msg_template = 'value is not a valid IPv4 or IPv6 interface' + + +class IPvAnyNetworkError(PydanticValueError): + msg_template = 'value is not a valid IPv4 or IPv6 network' + + +class IPv4AddressError(PydanticValueError): + msg_template = 'value is not a valid IPv4 address' + + +class IPv6AddressError(PydanticValueError): + msg_template = 'value is not a valid IPv6 address' + + +class IPv4NetworkError(PydanticValueError): + msg_template = 'value is not a valid IPv4 network' + + +class IPv6NetworkError(PydanticValueError): + msg_template = 'value is not a valid IPv6 network' + + +class IPv4InterfaceError(PydanticValueError): + msg_template = 'value is not a valid IPv4 interface' + + +class IPv6InterfaceError(PydanticValueError): + msg_template = 'value is not a valid IPv6 interface' + + +class ColorError(PydanticValueError): + msg_template = 'value is not a valid color: {reason}' + + +class StrictBoolError(PydanticValueError): + msg_template = 'value is not a valid boolean' + + +class NotDigitError(PydanticValueError): + code = 'payment_card_number.digits' + msg_template = 'card number is not all digits' + + +class LuhnValidationError(PydanticValueError): + code = 'payment_card_number.luhn_check' + msg_template = 'card number is not luhn valid' + + +class InvalidLengthForBrand(PydanticValueError): + code = 'payment_card_number.invalid_length_for_brand' + msg_template = 'Length for a {brand} card must be {required_length}' + + +class InvalidByteSize(PydanticValueError): + msg_template = 'could not parse value and unit from byte string' + + +class InvalidByteSizeUnit(PydanticValueError): + msg_template = 'could not interpret byte unit: {unit}' + + +class MissingDiscriminator(PydanticValueError): + code = 'discriminated_union.missing_discriminator' + msg_template = 'Discriminator {discriminator_key!r} is missing in value' + + +class InvalidDiscriminator(PydanticValueError): + code = 'discriminated_union.invalid_discriminator' + msg_template = ( + 'No match for discriminator {discriminator_key!r} and value 
{discriminator_value!r} ' + '(allowed values: {allowed_values})' + ) + + def __init__(self, *, discriminator_key: str, discriminator_value: Any, allowed_values: Sequence[Any]) -> None: + super().__init__( + discriminator_key=discriminator_key, + discriminator_value=discriminator_value, + allowed_values=', '.join(map(repr, allowed_values)), + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/fields.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/fields.cp37-win_amd64.pyd new file mode 100644 index 00000000..07038d47 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/fields.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/fields.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/fields.py new file mode 100644 index 00000000..cecd3d20 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/fields.py @@ -0,0 +1,1247 @@ +import copy +import re +from collections import Counter as CollectionCounter, defaultdict, deque +from collections.abc import Callable, Hashable as CollectionsHashable, Iterable as CollectionsIterable +from typing import ( + TYPE_CHECKING, + Any, + Counter, + DefaultDict, + Deque, + Dict, + ForwardRef, + FrozenSet, + Generator, + Iterable, + Iterator, + List, + Mapping, + Optional, + Pattern, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +from typing_extensions import Annotated, Final + +from . import errors as errors_ +from .class_validators import Validator, make_generic_validator, prep_validators +from .error_wrappers import ErrorWrapper +from .errors import ConfigError, InvalidDiscriminator, MissingDiscriminator, NoneIsNotAllowedError +from .types import Json, JsonWrapper +from .typing import ( + NoArgAnyCallable, + convert_generics, + display_as_type, + get_args, + get_origin, + is_finalvar, + is_literal_type, + is_new_type, + is_none_type, + is_typeddict, + is_typeddict_special, + is_union, + new_type_supertype, +) +from .utils import ( + PyObjectStr, + Representation, + ValueItems, + get_discriminator_alias_and_values, + get_unique_discriminator_alias, + lenient_isinstance, + lenient_issubclass, + sequence_like, + smart_deepcopy, +) +from .validators import constant_validator, dict_validator, find_validators, validate_json + +Required: Any = Ellipsis + +T = TypeVar('T') + + +class UndefinedType: + def __repr__(self) -> str: + return 'PydanticUndefined' + + def __copy__(self: T) -> T: + return self + + def __reduce__(self) -> str: + return 'Undefined' + + def __deepcopy__(self: T, _: Any) -> T: + return self + + +Undefined = UndefinedType() + +if TYPE_CHECKING: + from .class_validators import ValidatorsList + from .config import BaseConfig + from .error_wrappers import ErrorList + from .types import ModelOrDc + from .typing import AbstractSetIntStr, MappingIntStrAny, ReprArgs + + ValidateReturn = Tuple[Optional[Any], Optional[ErrorList]] + LocStr = Union[Tuple[Union[int, str], ...], str] + BoolUndefined = Union[bool, UndefinedType] + + +class FieldInfo(Representation): + """ + Captures extra information about a field. 
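+
+    Normally constructed indirectly via ``Field()`` below; a minimal sketch
+    (hypothetical model)::
+
+        class User(BaseModel):
+            name: str = Field(..., min_length=1, description='display name')
+
+        # User.__fields__['name'].field_info.min_length == 1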
+ """ + + __slots__ = ( + 'default', + 'default_factory', + 'alias', + 'alias_priority', + 'title', + 'description', + 'exclude', + 'include', + 'const', + 'gt', + 'ge', + 'lt', + 'le', + 'multiple_of', + 'allow_inf_nan', + 'max_digits', + 'decimal_places', + 'min_items', + 'max_items', + 'unique_items', + 'min_length', + 'max_length', + 'allow_mutation', + 'repr', + 'regex', + 'discriminator', + 'extra', + ) + + # field constraints with the default value, it's also used in update_from_config below + __field_constraints__ = { + 'min_length': None, + 'max_length': None, + 'regex': None, + 'gt': None, + 'lt': None, + 'ge': None, + 'le': None, + 'multiple_of': None, + 'allow_inf_nan': None, + 'max_digits': None, + 'decimal_places': None, + 'min_items': None, + 'max_items': None, + 'unique_items': None, + 'allow_mutation': True, + } + + def __init__(self, default: Any = Undefined, **kwargs: Any) -> None: + self.default = default + self.default_factory = kwargs.pop('default_factory', None) + self.alias = kwargs.pop('alias', None) + self.alias_priority = kwargs.pop('alias_priority', 2 if self.alias is not None else None) + self.title = kwargs.pop('title', None) + self.description = kwargs.pop('description', None) + self.exclude = kwargs.pop('exclude', None) + self.include = kwargs.pop('include', None) + self.const = kwargs.pop('const', None) + self.gt = kwargs.pop('gt', None) + self.ge = kwargs.pop('ge', None) + self.lt = kwargs.pop('lt', None) + self.le = kwargs.pop('le', None) + self.multiple_of = kwargs.pop('multiple_of', None) + self.allow_inf_nan = kwargs.pop('allow_inf_nan', None) + self.max_digits = kwargs.pop('max_digits', None) + self.decimal_places = kwargs.pop('decimal_places', None) + self.min_items = kwargs.pop('min_items', None) + self.max_items = kwargs.pop('max_items', None) + self.unique_items = kwargs.pop('unique_items', None) + self.min_length = kwargs.pop('min_length', None) + self.max_length = kwargs.pop('max_length', None) + self.allow_mutation = kwargs.pop('allow_mutation', True) + self.regex = kwargs.pop('regex', None) + self.discriminator = kwargs.pop('discriminator', None) + self.repr = kwargs.pop('repr', True) + self.extra = kwargs + + def __repr_args__(self) -> 'ReprArgs': + + field_defaults_to_hide: Dict[str, Any] = { + 'repr': True, + **self.__field_constraints__, + } + + attrs = ((s, getattr(self, s)) for s in self.__slots__) + return [(a, v) for a, v in attrs if v != field_defaults_to_hide.get(a, None)] + + def get_constraints(self) -> Set[str]: + """ + Gets the constraints set on the field by comparing the constraint value with its default value + + :return: the constraints set on field_info + """ + return {attr for attr, default in self.__field_constraints__.items() if getattr(self, attr) != default} + + def update_from_config(self, from_config: Dict[str, Any]) -> None: + """ + Update this FieldInfo based on a dict from get_field_info, only fields which have not been set are dated. + """ + for attr_name, value in from_config.items(): + try: + current_value = getattr(self, attr_name) + except AttributeError: + # attr_name is not an attribute of FieldInfo, it should therefore be added to extra + # (except if extra already has this value!) 
+ self.extra.setdefault(attr_name, value) + else: + if current_value is self.__field_constraints__.get(attr_name, None): + setattr(self, attr_name, value) + elif attr_name == 'exclude': + self.exclude = ValueItems.merge(value, current_value) + elif attr_name == 'include': + self.include = ValueItems.merge(value, current_value, intersect=True) + + def _validate(self) -> None: + if self.default is not Undefined and self.default_factory is not None: + raise ValueError('cannot specify both default and default_factory') + + +def Field( + default: Any = Undefined, + *, + default_factory: Optional[NoArgAnyCallable] = None, + alias: str = None, + title: str = None, + description: str = None, + exclude: Union['AbstractSetIntStr', 'MappingIntStrAny', Any] = None, + include: Union['AbstractSetIntStr', 'MappingIntStrAny', Any] = None, + const: bool = None, + gt: float = None, + ge: float = None, + lt: float = None, + le: float = None, + multiple_of: float = None, + allow_inf_nan: bool = None, + max_digits: int = None, + decimal_places: int = None, + min_items: int = None, + max_items: int = None, + unique_items: bool = None, + min_length: int = None, + max_length: int = None, + allow_mutation: bool = True, + regex: str = None, + discriminator: str = None, + repr: bool = True, + **extra: Any, +) -> Any: + """ + Used to provide extra information about a field, either for the model schema or complex validation. Some arguments + apply only to number fields (``int``, ``float``, ``Decimal``) and some apply only to ``str``. + + :param default: since this is replacing the field’s default, its first argument is used + to set the default, use ellipsis (``...``) to indicate the field is required + :param default_factory: callable that will be called when a default value is needed for this field + If both `default` and `default_factory` are set, an error is raised. + :param alias: the public name of the field + :param title: can be any string, used in the schema + :param description: can be any string, used in the schema + :param exclude: exclude this field while dumping. + Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method. + :param include: include this field while dumping. + Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method. + :param const: this field is required and *must* take it's default value + :param gt: only applies to numbers, requires the field to be "greater than". The schema + will have an ``exclusiveMinimum`` validation keyword + :param ge: only applies to numbers, requires the field to be "greater than or equal to". The + schema will have a ``minimum`` validation keyword + :param lt: only applies to numbers, requires the field to be "less than". The schema + will have an ``exclusiveMaximum`` validation keyword + :param le: only applies to numbers, requires the field to be "less than or equal to". The + schema will have a ``maximum`` validation keyword + :param multiple_of: only applies to numbers, requires the field to be "a multiple of". The + schema will have a ``multipleOf`` validation keyword + :param allow_inf_nan: only applies to numbers, allows the field to be NaN or infinity (+inf or -inf), + which is a valid Python float. Default True, set to False for compatibility with JSON. + :param max_digits: only applies to Decimals, requires the field to have a maximum number + of digits within the decimal. It does not include a zero before the decimal point or trailing decimal zeroes. 
+    :param decimal_places: only applies to Decimals, requires the field to have at most the given number of
+      decimal places. It does not include trailing decimal zeroes.
+    :param min_items: only applies to lists, requires the field to have a minimum number of
+      elements. The schema will have a ``minItems`` validation keyword
+    :param max_items: only applies to lists, requires the field to have a maximum number of
+      elements. The schema will have a ``maxItems`` validation keyword
+    :param unique_items: only applies to lists, requires the field not to have duplicated
+      elements. The schema will have a ``uniqueItems`` validation keyword
+    :param min_length: only applies to strings, requires the field to have a minimum length. The
+      schema will have a ``minLength`` validation keyword
+    :param max_length: only applies to strings, requires the field to have a maximum length. The
+      schema will have a ``maxLength`` validation keyword
+    :param allow_mutation: a boolean which defaults to True. When False, the field raises a TypeError if the field is
+      assigned on an instance. The BaseModel Config must set validate_assignment to True
+    :param regex: only applies to strings, requires the field to match against a regular expression
+      pattern string. The schema will have a ``pattern`` validation keyword
+    :param discriminator: only useful with a (discriminated a.k.a. tagged) `Union` of sub models with a common field.
+      The `discriminator` is the name of this common field to shorten validation and improve generated schema
+    :param repr: show this field in the representation
+    :param **extra: any additional keyword arguments will be added as is to the schema
+    """
+    field_info = FieldInfo(
+        default,
+        default_factory=default_factory,
+        alias=alias,
+        title=title,
+        description=description,
+        exclude=exclude,
+        include=include,
+        const=const,
+        gt=gt,
+        ge=ge,
+        lt=lt,
+        le=le,
+        multiple_of=multiple_of,
+        allow_inf_nan=allow_inf_nan,
+        max_digits=max_digits,
+        decimal_places=decimal_places,
+        min_items=min_items,
+        max_items=max_items,
+        unique_items=unique_items,
+        min_length=min_length,
+        max_length=max_length,
+        allow_mutation=allow_mutation,
+        regex=regex,
+        discriminator=discriminator,
+        repr=repr,
+        **extra,
+    )
+    field_info._validate()
+    return field_info
+
+
+# used to be an enum but changed to ints for a small performance improvement (less access overhead)
+SHAPE_SINGLETON = 1
+SHAPE_LIST = 2
+SHAPE_SET = 3
+SHAPE_MAPPING = 4
+SHAPE_TUPLE = 5
+SHAPE_TUPLE_ELLIPSIS = 6
+SHAPE_SEQUENCE = 7
+SHAPE_FROZENSET = 8
+SHAPE_ITERABLE = 9
+SHAPE_GENERIC = 10
+SHAPE_DEQUE = 11
+SHAPE_DICT = 12
+SHAPE_DEFAULTDICT = 13
+SHAPE_COUNTER = 14
+SHAPE_NAME_LOOKUP = {
+    SHAPE_LIST: 'List[{}]',
+    SHAPE_SET: 'Set[{}]',
+    SHAPE_TUPLE_ELLIPSIS: 'Tuple[{}, ...]',
+    SHAPE_SEQUENCE: 'Sequence[{}]',
+    SHAPE_FROZENSET: 'FrozenSet[{}]',
+    SHAPE_ITERABLE: 'Iterable[{}]',
+    SHAPE_DEQUE: 'Deque[{}]',
+    SHAPE_DICT: 'Dict[{}]',
+    SHAPE_DEFAULTDICT: 'DefaultDict[{}]',
+    SHAPE_COUNTER: 'Counter[{}]',
+}
+
+MAPPING_LIKE_SHAPES: Set[int] = {SHAPE_DEFAULTDICT, SHAPE_DICT, SHAPE_MAPPING, SHAPE_COUNTER}
+
+
+class ModelField(Representation):
+    __slots__ = (
+        'type_',
+        'outer_type_',
+        'annotation',
+        'sub_fields',
+        'sub_fields_mapping',
+        'key_field',
+        'validators',
+        'pre_validators',
+        'post_validators',
+        'default',
+        'default_factory',
+        'required',
+        'final',
+        'model_config',
+        'name',
+        'alias',
+        'has_alias',
+        'field_info',
+        'discriminator_key',
+        'discriminator_alias',
+        'validate_always',
+        'allow_none',
+        'shape',
+        'class_validators',
+        'parse_json',
+    )
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        type_: Type[Any],
+        class_validators: Optional[Dict[str, Validator]],
+        model_config: Type['BaseConfig'],
+        default: Any = None,
+        default_factory: Optional[NoArgAnyCallable] = None,
+        required: 'BoolUndefined' = Undefined,
+        final: bool = False,
+        alias: str = None,
+        field_info: Optional[FieldInfo] = None,
+    ) -> None:
+
+        self.name: str = name
+        self.has_alias: bool = alias is not None
+        self.alias: str = alias if alias is not None else name
+        self.annotation = type_
+        self.type_: Any = convert_generics(type_)
+        self.outer_type_: Any = type_
+        self.class_validators = class_validators or {}
+        self.default: Any = default
+        self.default_factory: Optional[NoArgAnyCallable] = default_factory
+        self.required: 'BoolUndefined' = required
+        self.final: bool = final
+        self.model_config = model_config
+        self.field_info: FieldInfo = field_info or FieldInfo(default)
+        self.discriminator_key: Optional[str] = self.field_info.discriminator
+        self.discriminator_alias: Optional[str] = self.discriminator_key
+
+        self.allow_none: bool = False
+        self.validate_always: bool = False
+        self.sub_fields: Optional[List[ModelField]] = None
+        self.sub_fields_mapping: Optional[Dict[str, 'ModelField']] = None  # used for discriminated union
+        self.key_field: Optional[ModelField] = None
+        self.validators: 'ValidatorsList' = []
+        self.pre_validators: Optional['ValidatorsList'] = None
+        self.post_validators: Optional['ValidatorsList'] = None
+        self.parse_json: bool = False
+        self.shape: int = SHAPE_SINGLETON
+        self.model_config.prepare_field(self)
+        self.prepare()
+
+    def get_default(self) -> Any:
+        return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory()
+
+    @staticmethod
+    def _get_field_info(
+        field_name: str, annotation: Any, value: Any, config: Type['BaseConfig']
+    ) -> Tuple[FieldInfo, Any]:
+        """
+        Get a FieldInfo from a root typing.Annotated annotation, value, or config default.
+
+        The FieldInfo may be set in typing.Annotated or the value, but not both. If neither contains
+        a FieldInfo, a new one will be created using the config.
+
+        :param field_name: name of the field for use in error messages
+        :param annotation: a type hint such as `str` or `Annotated[str, Field(..., min_length=5)]`
+        :param value: the field's assigned value
+        :param config: the model's config object
+        :return: the FieldInfo contained in the `annotation`, the value, or a new one from the config.
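+
+        A hedged sketch of the Annotated branch (hypothetical annotation)::
+
+            # annotation = Annotated[int, Field(gt=0)], value = 10
+            # -> returns (a copy of the FieldInfo with default set to 10, 10)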
+        """
+        field_info_from_config = config.get_field_info(field_name)
+
+        field_info = None
+        if get_origin(annotation) is Annotated:
+            field_infos = [arg for arg in get_args(annotation)[1:] if isinstance(arg, FieldInfo)]
+            if len(field_infos) > 1:
+                raise ValueError(f'cannot specify multiple `Annotated` `Field`s for {field_name!r}')
+            field_info = next(iter(field_infos), None)
+            if field_info is not None:
+                field_info = copy.copy(field_info)
+                field_info.update_from_config(field_info_from_config)
+                if field_info.default not in (Undefined, Required):
+                    raise ValueError(f'`Field` default cannot be set in `Annotated` for {field_name!r}')
+                if value is not Undefined and value is not Required:
+                    # check also `Required` because of `validate_arguments` that sets `...` as default value
+                    field_info.default = value
+
+        if isinstance(value, FieldInfo):
+            if field_info is not None:
+                raise ValueError(f'cannot specify `Annotated` and value `Field`s together for {field_name!r}')
+            field_info = value
+            field_info.update_from_config(field_info_from_config)
+        elif field_info is None:
+            field_info = FieldInfo(value, **field_info_from_config)
+        value = None if field_info.default_factory is not None else field_info.default
+        field_info._validate()
+        return field_info, value
+
+    @classmethod
+    def infer(
+        cls,
+        *,
+        name: str,
+        value: Any,
+        annotation: Any,
+        class_validators: Optional[Dict[str, Validator]],
+        config: Type['BaseConfig'],
+    ) -> 'ModelField':
+        from .schema import get_annotation_from_field_info
+
+        field_info, value = cls._get_field_info(name, annotation, value, config)
+        required: 'BoolUndefined' = Undefined
+        if value is Required:
+            required = True
+            value = None
+        elif value is not Undefined:
+            required = False
+        annotation = get_annotation_from_field_info(annotation, field_info, name, config.validate_assignment)
+
+        return cls(
+            name=name,
+            type_=annotation,
+            alias=field_info.alias,
+            class_validators=class_validators,
+            default=value,
+            default_factory=field_info.default_factory,
+            required=required,
+            model_config=config,
+            field_info=field_info,
+        )
+
+    def set_config(self, config: Type['BaseConfig']) -> None:
+        self.model_config = config
+        info_from_config = config.get_field_info(self.name)
+        config.prepare_field(self)
+        new_alias = info_from_config.get('alias')
+        new_alias_priority = info_from_config.get('alias_priority') or 0
+        if new_alias and new_alias_priority >= (self.field_info.alias_priority or 0):
+            self.field_info.alias = new_alias
+            self.field_info.alias_priority = new_alias_priority
+            self.alias = new_alias
+        new_exclude = info_from_config.get('exclude')
+        if new_exclude is not None:
+            self.field_info.exclude = ValueItems.merge(self.field_info.exclude, new_exclude)
+        new_include = info_from_config.get('include')
+        if new_include is not None:
+            self.field_info.include = ValueItems.merge(self.field_info.include, new_include, intersect=True)
+
+    @property
+    def alt_alias(self) -> bool:
+        return self.name != self.alias
+
+    def prepare(self) -> None:
+        """
+        Prepare the field by inspecting self.default, self.type_ etc.
+
+        Note: this method is **not** idempotent (because _type_analysis is not idempotent),
+        e.g. calling it multiple times may modify the field and configure it incorrectly.
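+
+        Sketch of the deferred ForwardRef case handled below (hypothetical
+        self-referencing model)::
+
+            class Node(BaseModel):
+                next: Optional['Node'] = None  # left as a ForwardRef here
+
+            Node.update_forward_refs()  # finishes preparation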
+ """ + self._set_default_and_type() + if self.type_.__class__ is ForwardRef or self.type_.__class__ is DeferredType: + # self.type_ is currently a ForwardRef and there's nothing we can do now, + # user will need to call model.update_forward_refs() + return + + self._type_analysis() + if self.required is Undefined: + self.required = True + if self.default is Undefined and self.default_factory is None: + self.default = None + self.populate_validators() + + def _set_default_and_type(self) -> None: + """ + Set the default value, infer the type if needed and check if `None` value is valid. + """ + if self.default_factory is not None: + if self.type_ is Undefined: + raise errors_.ConfigError( + f'you need to set the type of field {self.name!r} when using `default_factory`' + ) + return + + default_value = self.get_default() + + if default_value is not None and self.type_ is Undefined: + self.type_ = default_value.__class__ + self.outer_type_ = self.type_ + self.annotation = self.type_ + + if self.type_ is Undefined: + raise errors_.ConfigError(f'unable to infer type for attribute "{self.name}"') + + if self.required is False and default_value is None: + self.allow_none = True + + def _type_analysis(self) -> None: # noqa: C901 (ignore complexity) + # typing interface is horrible, we have to do some ugly checks + if lenient_issubclass(self.type_, JsonWrapper): + self.type_ = self.type_.inner_type + self.parse_json = True + elif lenient_issubclass(self.type_, Json): + self.type_ = Any + self.parse_json = True + elif isinstance(self.type_, TypeVar): + if self.type_.__bound__: + self.type_ = self.type_.__bound__ + elif self.type_.__constraints__: + self.type_ = Union[self.type_.__constraints__] + else: + self.type_ = Any + elif is_new_type(self.type_): + self.type_ = new_type_supertype(self.type_) + + if self.type_ is Any or self.type_ is object: + if self.required is Undefined: + self.required = False + self.allow_none = True + return + elif self.type_ is Pattern or self.type_ is re.Pattern: + # python 3.7 only, Pattern is a typing object but without sub fields + return + elif is_literal_type(self.type_): + return + elif is_typeddict(self.type_): + return + + if is_finalvar(self.type_): + self.final = True + + if self.type_ is Final: + self.type_ = Any + else: + self.type_ = get_args(self.type_)[0] + + self._type_analysis() + return + + origin = get_origin(self.type_) + + if origin is Annotated or is_typeddict_special(origin): + self.type_ = get_args(self.type_)[0] + self._type_analysis() + return + + if self.discriminator_key is not None and not is_union(origin): + raise TypeError('`discriminator` can only be used with `Union` type with more than one variant') + + # add extra check for `collections.abc.Hashable` for python 3.10+ where origin is not `None` + if origin is None or origin is CollectionsHashable: + # field is not "typing" object eg. Union, Dict, List etc. + # allow None for virtual superclasses of NoneType, e.g. 
Hashable + if isinstance(self.type_, type) and isinstance(None, self.type_): + self.allow_none = True + return + elif origin is Callable: + return + elif is_union(origin): + types_ = [] + for type_ in get_args(self.type_): + if is_none_type(type_) or type_ is Any or type_ is object: + if self.required is Undefined: + self.required = False + self.allow_none = True + if is_none_type(type_): + continue + types_.append(type_) + + if len(types_) == 1: + # Optional[] + self.type_ = types_[0] + # this is the one case where the "outer type" isn't just the original type + self.outer_type_ = self.type_ + # re-run to correctly interpret the new self.type_ + self._type_analysis() + else: + self.sub_fields = [self._create_sub_type(t, f'{self.name}_{display_as_type(t)}') for t in types_] + + if self.discriminator_key is not None: + self.prepare_discriminated_union_sub_fields() + return + elif issubclass(origin, Tuple): # type: ignore + # origin == Tuple without item type + args = get_args(self.type_) + if not args: # plain tuple + self.type_ = Any + self.shape = SHAPE_TUPLE_ELLIPSIS + elif len(args) == 2 and args[1] is Ellipsis: # e.g. Tuple[int, ...] + self.type_ = args[0] + self.shape = SHAPE_TUPLE_ELLIPSIS + self.sub_fields = [self._create_sub_type(args[0], f'{self.name}_0')] + elif args == ((),): # Tuple[()] means empty tuple + self.shape = SHAPE_TUPLE + self.type_ = Any + self.sub_fields = [] + else: + self.shape = SHAPE_TUPLE + self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(args)] + return + elif issubclass(origin, List): + # Create self validators + get_validators = getattr(self.type_, '__get_validators__', None) + if get_validators: + self.class_validators.update( + {f'list_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} + ) + + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_LIST + elif issubclass(origin, Set): + # Create self validators + get_validators = getattr(self.type_, '__get_validators__', None) + if get_validators: + self.class_validators.update( + {f'set_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} + ) + + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_SET + elif issubclass(origin, FrozenSet): + # Create self validators + get_validators = getattr(self.type_, '__get_validators__', None) + if get_validators: + self.class_validators.update( + {f'frozenset_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} + ) + + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_FROZENSET + elif issubclass(origin, Deque): + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_DEQUE + elif issubclass(origin, Sequence): + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_SEQUENCE + # priority to most common mapping: dict + elif origin is dict or origin is Dict: + self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) + self.type_ = get_args(self.type_)[1] + self.shape = SHAPE_DICT + elif issubclass(origin, DefaultDict): + self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) + self.type_ = get_args(self.type_)[1] + self.shape = SHAPE_DEFAULTDICT + elif issubclass(origin, Counter): + self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) + self.type_ = int + self.shape = SHAPE_COUNTER + elif issubclass(origin, Mapping): + self.key_field = self._create_sub_type(get_args(self.type_)[0], 
'key_' + self.name, for_keys=True) + self.type_ = get_args(self.type_)[1] + self.shape = SHAPE_MAPPING + # Equality check as almost everything inherits form Iterable, including str + # check for Iterable and CollectionsIterable, as it could receive one even when declared with the other + elif origin in {Iterable, CollectionsIterable}: + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_ITERABLE + self.sub_fields = [self._create_sub_type(self.type_, f'{self.name}_type')] + elif issubclass(origin, Type): # type: ignore + return + elif hasattr(origin, '__get_validators__') or self.model_config.arbitrary_types_allowed: + # Is a Pydantic-compatible generic that handles itself + # or we have arbitrary_types_allowed = True + self.shape = SHAPE_GENERIC + self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(get_args(self.type_))] + self.type_ = origin + return + else: + raise TypeError(f'Fields of type "{origin}" are not supported.') + + # type_ has been refined eg. as the type of a List and sub_fields needs to be populated + self.sub_fields = [self._create_sub_type(self.type_, '_' + self.name)] + + def prepare_discriminated_union_sub_fields(self) -> None: + """ + Prepare the mapping -> and update `sub_fields` + Note that this process can be aborted if a `ForwardRef` is encountered + """ + assert self.discriminator_key is not None + + if self.type_.__class__ is DeferredType: + return + + assert self.sub_fields is not None + sub_fields_mapping: Dict[str, 'ModelField'] = {} + all_aliases: Set[str] = set() + + for sub_field in self.sub_fields: + t = sub_field.type_ + if t.__class__ is ForwardRef: + # Stopping everything...will need to call `update_forward_refs` + return + + alias, discriminator_values = get_discriminator_alias_and_values(t, self.discriminator_key) + all_aliases.add(alias) + for discriminator_value in discriminator_values: + sub_fields_mapping[discriminator_value] = sub_field + + self.sub_fields_mapping = sub_fields_mapping + self.discriminator_alias = get_unique_discriminator_alias(all_aliases, self.discriminator_key) + + def _create_sub_type(self, type_: Type[Any], name: str, *, for_keys: bool = False) -> 'ModelField': + if for_keys: + class_validators = None + else: + # validators for sub items should not have `each_item` as we want to check only the first sublevel + class_validators = { + k: Validator( + func=v.func, + pre=v.pre, + each_item=False, + always=v.always, + check_fields=v.check_fields, + skip_on_failure=v.skip_on_failure, + ) + for k, v in self.class_validators.items() + if v.each_item + } + + field_info, _ = self._get_field_info(name, type_, None, self.model_config) + + return self.__class__( + type_=type_, + name=name, + class_validators=class_validators, + model_config=self.model_config, + field_info=field_info, + ) + + def populate_validators(self) -> None: + """ + Prepare self.pre_validators, self.validators, and self.post_validators based on self.type_'s __get_validators__ + and class validators. This method should be idempotent, e.g. it should be safe to call multiple times + without mis-configuring the field. 
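+
+        Illustrative sketch (hypothetical validator; ``pre=True`` without
+        ``each_item`` places it in ``pre_validators``)::
+
+            class Model(BaseModel):
+                name: str
+
+                @validator('name', pre=True)
+                def strip_name(cls, v):
+                    return v.strip()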
+ """ + self.validate_always = getattr(self.type_, 'validate_always', False) or any( + v.always for v in self.class_validators.values() + ) + + class_validators_ = self.class_validators.values() + if not self.sub_fields or self.shape == SHAPE_GENERIC: + get_validators = getattr(self.type_, '__get_validators__', None) + v_funcs = ( + *[v.func for v in class_validators_ if v.each_item and v.pre], + *(get_validators() if get_validators else list(find_validators(self.type_, self.model_config))), + *[v.func for v in class_validators_ if v.each_item and not v.pre], + ) + self.validators = prep_validators(v_funcs) + + self.pre_validators = [] + self.post_validators = [] + + if self.field_info and self.field_info.const: + self.post_validators.append(make_generic_validator(constant_validator)) + + if class_validators_: + self.pre_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and v.pre) + self.post_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and not v.pre) + + if self.parse_json: + self.pre_validators.append(make_generic_validator(validate_json)) + + self.pre_validators = self.pre_validators or None + self.post_validators = self.post_validators or None + + def validate( + self, v: Any, values: Dict[str, Any], *, loc: 'LocStr', cls: Optional['ModelOrDc'] = None + ) -> 'ValidateReturn': + + assert self.type_.__class__ is not DeferredType + + if self.type_.__class__ is ForwardRef: + assert cls is not None + raise ConfigError( + f'field "{self.name}" not yet prepared so type is still a ForwardRef, ' + f'you might need to call {cls.__name__}.update_forward_refs().' + ) + + errors: Optional['ErrorList'] + if self.pre_validators: + v, errors = self._apply_validators(v, values, loc, cls, self.pre_validators) + if errors: + return v, errors + + if v is None: + if is_none_type(self.type_): + # keep validating + pass + elif self.allow_none: + if self.post_validators: + return self._apply_validators(v, values, loc, cls, self.post_validators) + else: + return None, None + else: + return v, ErrorWrapper(NoneIsNotAllowedError(), loc) + + if self.shape == SHAPE_SINGLETON: + v, errors = self._validate_singleton(v, values, loc, cls) + elif self.shape in MAPPING_LIKE_SHAPES: + v, errors = self._validate_mapping_like(v, values, loc, cls) + elif self.shape == SHAPE_TUPLE: + v, errors = self._validate_tuple(v, values, loc, cls) + elif self.shape == SHAPE_ITERABLE: + v, errors = self._validate_iterable(v, values, loc, cls) + elif self.shape == SHAPE_GENERIC: + v, errors = self._apply_validators(v, values, loc, cls, self.validators) + else: + # sequence, list, set, generator, tuple with ellipsis, frozen set + v, errors = self._validate_sequence_like(v, values, loc, cls) + + if not errors and self.post_validators: + v, errors = self._apply_validators(v, values, loc, cls, self.post_validators) + return v, errors + + def _validate_sequence_like( # noqa: C901 (ignore complexity) + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + """ + Validate sequence-like containers: lists, tuples, sets and generators + Note that large if-else blocks are necessary to enable Cython + optimization, which is why we disable the complexity check above. 
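+
+        Illustrative sketch (hypothetical model)::
+
+            class M(BaseModel):
+                ids: Tuple[int, ...]
+
+            # M(ids=['1', '2']).ids == (1, 2) -- items validated one by one,
+            # then converted back to the declared container shape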
+ """ + if not sequence_like(v): + e: errors_.PydanticTypeError + if self.shape == SHAPE_LIST: + e = errors_.ListError() + elif self.shape in (SHAPE_TUPLE, SHAPE_TUPLE_ELLIPSIS): + e = errors_.TupleError() + elif self.shape == SHAPE_SET: + e = errors_.SetError() + elif self.shape == SHAPE_FROZENSET: + e = errors_.FrozenSetError() + else: + e = errors_.SequenceError() + return v, ErrorWrapper(e, loc) + + loc = loc if isinstance(loc, tuple) else (loc,) + result = [] + errors: List[ErrorList] = [] + for i, v_ in enumerate(v): + v_loc = *loc, i + r, ee = self._validate_singleton(v_, values, v_loc, cls) + if ee: + errors.append(ee) + else: + result.append(r) + + if errors: + return v, errors + + converted: Union[List[Any], Set[Any], FrozenSet[Any], Tuple[Any, ...], Iterator[Any], Deque[Any]] = result + + if self.shape == SHAPE_SET: + converted = set(result) + elif self.shape == SHAPE_FROZENSET: + converted = frozenset(result) + elif self.shape == SHAPE_TUPLE_ELLIPSIS: + converted = tuple(result) + elif self.shape == SHAPE_DEQUE: + converted = deque(result) + elif self.shape == SHAPE_SEQUENCE: + if isinstance(v, tuple): + converted = tuple(result) + elif isinstance(v, set): + converted = set(result) + elif isinstance(v, Generator): + converted = iter(result) + elif isinstance(v, deque): + converted = deque(result) + return converted, None + + def _validate_iterable( + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + """ + Validate Iterables. + + This intentionally doesn't validate values to allow infinite generators. + """ + + try: + iterable = iter(v) + except TypeError: + return v, ErrorWrapper(errors_.IterableError(), loc) + return iterable, None + + def _validate_tuple( + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + e: Optional[Exception] = None + if not sequence_like(v): + e = errors_.TupleError() + else: + actual_length, expected_length = len(v), len(self.sub_fields) # type: ignore + if actual_length != expected_length: + e = errors_.TupleLengthError(actual_length=actual_length, expected_length=expected_length) + + if e: + return v, ErrorWrapper(e, loc) + + loc = loc if isinstance(loc, tuple) else (loc,) + result = [] + errors: List[ErrorList] = [] + for i, (v_, field) in enumerate(zip(v, self.sub_fields)): # type: ignore + v_loc = *loc, i + r, ee = field.validate(v_, values, loc=v_loc, cls=cls) + if ee: + errors.append(ee) + else: + result.append(r) + + if errors: + return v, errors + else: + return tuple(result), None + + def _validate_mapping_like( + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + try: + v_iter = dict_validator(v) + except TypeError as exc: + return v, ErrorWrapper(exc, loc) + + loc = loc if isinstance(loc, tuple) else (loc,) + result, errors = {}, [] + for k, v_ in v_iter.items(): + v_loc = *loc, '__key__' + key_result, key_errors = self.key_field.validate(k, values, loc=v_loc, cls=cls) # type: ignore + if key_errors: + errors.append(key_errors) + continue + + v_loc = *loc, k + value_result, value_errors = self._validate_singleton(v_, values, v_loc, cls) + if value_errors: + errors.append(value_errors) + continue + + result[key_result] = value_result + if errors: + return v, errors + elif self.shape == SHAPE_DICT: + return result, None + elif self.shape == SHAPE_DEFAULTDICT: + return defaultdict(self.type_, result), None + elif self.shape == SHAPE_COUNTER: + return CollectionCounter(result), 
None + else: + return self._get_mapping_value(v, result), None + + def _get_mapping_value(self, original: T, converted: Dict[Any, Any]) -> Union[T, Dict[Any, Any]]: + """ + When type is `Mapping[KT, KV]` (or another unsupported mapping), we try to avoid + coercing to `dict` unwillingly. + """ + original_cls = original.__class__ + + if original_cls == dict or original_cls == Dict: + return converted + elif original_cls in {defaultdict, DefaultDict}: + return defaultdict(self.type_, converted) + else: + try: + # Counter, OrderedDict, UserDict, ... + return original_cls(converted) # type: ignore + except TypeError: + raise RuntimeError(f'Could not convert dictionary to {original_cls.__name__!r}') from None + + def _validate_singleton( + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + if self.sub_fields: + if self.discriminator_key is not None: + return self._validate_discriminated_union(v, values, loc, cls) + + errors = [] + + if self.model_config.smart_union and is_union(get_origin(self.type_)): + # 1st pass: check if the value is an exact instance of one of the Union types + # (e.g. to avoid coercing a bool into an int) + for field in self.sub_fields: + if v.__class__ is field.outer_type_: + return v, None + + # 2nd pass: check if the value is an instance of any subclass of the Union types + for field in self.sub_fields: + # This whole logic will be improved later on to support more complex `isinstance` checks + # It will probably be done once a strict mode is added and be something like: + # ``` + # value, error = field.validate(v, values, strict=True) + # if error is None: + # return value, None + # ``` + try: + if isinstance(v, field.outer_type_): + return v, None + except TypeError: + # compound type + if lenient_isinstance(v, get_origin(field.outer_type_)): + value, error = field.validate(v, values, loc=loc, cls=cls) + if not error: + return value, None + + # 1st pass by default or 3rd pass with `smart_union` enabled: + # check if the value can be coerced into one of the Union types + for field in self.sub_fields: + value, error = field.validate(v, values, loc=loc, cls=cls) + if error: + errors.append(error) + else: + return value, None + return v, errors + else: + return self._apply_validators(v, values, loc, cls, self.validators) + + def _validate_discriminated_union( + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + assert self.discriminator_key is not None + assert self.discriminator_alias is not None + + try: + discriminator_value = v[self.discriminator_alias] + except KeyError: + return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc) + except TypeError: + try: + # BaseModel or dataclass + discriminator_value = getattr(v, self.discriminator_key) + except (AttributeError, TypeError): + return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc) + + try: + sub_field = self.sub_fields_mapping[discriminator_value] # type: ignore[index] + except TypeError: + assert cls is not None + raise ConfigError( + f'field "{self.name}" not yet prepared so type is still a ForwardRef, ' + f'you might need to call {cls.__name__}.update_forward_refs().' 
+ )
+ except KeyError:
+ assert self.sub_fields_mapping is not None
+ return v, ErrorWrapper(
+ InvalidDiscriminator(
+ discriminator_key=self.discriminator_key,
+ discriminator_value=discriminator_value,
+ allowed_values=list(self.sub_fields_mapping),
+ ),
+ loc,
+ )
+ else:
+ if not isinstance(loc, tuple):
+ loc = (loc,)
+ return sub_field.validate(v, values, loc=(*loc, display_as_type(sub_field.type_)), cls=cls)
+
+ def _apply_validators(
+ self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'], validators: 'ValidatorsList'
+ ) -> 'ValidateReturn':
+ for validator in validators:
+ try:
+ v = validator(cls, v, values, self, self.model_config)
+ except (ValueError, TypeError, AssertionError) as exc:
+ return v, ErrorWrapper(exc, loc)
+ return v, None
+
+ def is_complex(self) -> bool:
+ """
+ Whether the field is "complex", e.g. env variables should be parsed as JSON.
+ """
+ from .main import BaseModel
+
+ return (
+ self.shape != SHAPE_SINGLETON
+ or hasattr(self.type_, '__pydantic_model__')
+ or lenient_issubclass(self.type_, (BaseModel, list, set, frozenset, dict))
+ )
+
+ def _type_display(self) -> PyObjectStr:
+ t = display_as_type(self.type_)
+
+ if self.shape in MAPPING_LIKE_SHAPES:
+ t = f'Mapping[{display_as_type(self.key_field.type_)}, {t}]' # type: ignore
+ elif self.shape == SHAPE_TUPLE:
+ t = 'Tuple[{}]'.format(', '.join(display_as_type(f.type_) for f in self.sub_fields)) # type: ignore
+ elif self.shape == SHAPE_GENERIC:
+ assert self.sub_fields
+ t = '{}[{}]'.format(
+ display_as_type(self.type_), ', '.join(display_as_type(f.type_) for f in self.sub_fields)
+ )
+ elif self.shape != SHAPE_SINGLETON:
+ t = SHAPE_NAME_LOOKUP[self.shape].format(t)
+
+ if self.allow_none and (self.shape != SHAPE_SINGLETON or not self.sub_fields):
+ t = f'Optional[{t}]'
+ return PyObjectStr(t)
+
+ def __repr_args__(self) -> 'ReprArgs':
+ args = [('name', self.name), ('type', self._type_display()), ('required', self.required)]
+
+ if not self.required:
+ if self.default_factory is not None:
+ args.append(('default_factory', f'<function {self.default_factory.__name__}>'))
+ else:
+ args.append(('default', self.default))
+
+ if self.alt_alias:
+ args.append(('alias', self.alias))
+ return args
+
+
+class ModelPrivateAttr(Representation):
+ __slots__ = ('default', 'default_factory')
+
+ def __init__(self, default: Any = Undefined, *, default_factory: Optional[NoArgAnyCallable] = None) -> None:
+ self.default = default
+ self.default_factory = default_factory
+
+ def get_default(self) -> Any:
+ return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory()
+
+ def __eq__(self, other: Any) -> bool:
+ return isinstance(other, self.__class__) and (self.default, self.default_factory) == (
+ other.default,
+ other.default_factory,
+ )
+
+
+def PrivateAttr(
+ default: Any = Undefined,
+ *,
+ default_factory: Optional[NoArgAnyCallable] = None,
+) -> Any:
+ """
+ Indicates that an attribute is only used internally and never mixed with regular fields.
+
+ Types or values of private attrs are not checked by pydantic and it's up to you to keep them relevant.
+
+ Private attrs are stored in the model's `__slots__`.
+
+ :param default: the attribute's default value
+ :param default_factory: callable that will be called when a default value is needed for this attribute
+ If both `default` and `default_factory` are set, an error is raised.
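+
+ A minimal usage sketch (the model and attribute names below are illustrative, not
+ part of pydantic; assumes `BaseModel` and the `datetime` module are imported):
+
+     class TimeAware(BaseModel):
+         _processed_at: datetime.datetime = PrivateAttr(default_factory=datetime.datetime.utcnow)
+
+     t = TimeAware()  # _processed_at is set, but excluded from dict() and json()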
+ """ + if default is not Undefined and default_factory is not None: + raise ValueError('cannot specify both default and default_factory') + + return ModelPrivateAttr( + default, + default_factory=default_factory, + ) + + +class DeferredType: + """ + Used to postpone field preparation, while creating recursive generic models. + """ + + +def is_finalvar_with_default_val(type_: Type[Any], val: Any) -> bool: + return is_finalvar(type_) and val is not Undefined and not isinstance(val, FieldInfo) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/generics.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/generics.py new file mode 100644 index 00000000..c43ac3e3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/generics.py @@ -0,0 +1,360 @@ +import sys +import typing +from typing import ( + TYPE_CHECKING, + Any, + ClassVar, + Dict, + Generic, + Iterator, + List, + Mapping, + Optional, + Tuple, + Type, + TypeVar, + Union, + cast, +) + +from typing_extensions import Annotated + +from .class_validators import gather_all_validators +from .fields import DeferredType +from .main import BaseModel, create_model +from .types import JsonWrapper +from .typing import display_as_type, get_all_type_hints, get_args, get_origin, typing_base +from .utils import LimitedDict, all_identical, lenient_issubclass + +GenericModelT = TypeVar('GenericModelT', bound='GenericModel') +TypeVarType = Any # since mypy doesn't allow the use of TypeVar as a type + +Parametrization = Mapping[TypeVarType, Type[Any]] + +_generic_types_cache: LimitedDict[Tuple[Type[Any], Union[Any, Tuple[Any, ...]]], Type[BaseModel]] = LimitedDict() +# _assigned_parameters is a Mapping from parametrized version of generic models to assigned types of parametrizations +# as captured during construction of the class (not instances). +# E.g., for generic model `Model[A, B]`, when parametrized model `Model[int, str]` is created, +# `Model[int, str]`: {A: int, B: str}` will be stored in `_assigned_parameters`. +# (This information is only otherwise available after creation from the class name string). +_assigned_parameters: LimitedDict[Type[Any], Parametrization] = LimitedDict() + + +class GenericModel(BaseModel): + __slots__ = () + __concrete__: ClassVar[bool] = False + + if TYPE_CHECKING: + # Putting this in a TYPE_CHECKING block allows us to replace `if Generic not in cls.__bases__` with + # `not hasattr(cls, "__parameters__")`. This means we don't need to force non-concrete subclasses of + # `GenericModel` to also inherit from `Generic`, which would require changes to the use of `create_model` below. + __parameters__: ClassVar[Tuple[TypeVarType, ...]] + + # Setting the return type as Type[Any] instead of Type[BaseModel] prevents PyCharm warnings + def __class_getitem__(cls: Type[GenericModelT], params: Union[Type[Any], Tuple[Type[Any], ...]]) -> Type[Any]: + """Instantiates a new class from a generic class `cls` and type variables `params`. + + :param params: Tuple of types the class . Given a generic class + `Model` with 2 type variables and a concrete model `Model[str, int]`, + the value `(str, int)` would be passed to `params`. + :return: New model class inheriting from `cls` with instantiated + types described by `params`. If no parameters are given, `cls` is + returned as is. 
+
+ """
+ cached = _generic_types_cache.get((cls, params))
+ if cached is not None:
+ return cached
+ if cls.__concrete__ and Generic not in cls.__bases__:
+ raise TypeError('Cannot parameterize a concrete instantiation of a generic model')
+ if not isinstance(params, tuple):
+ params = (params,)
+ if cls is GenericModel and any(isinstance(param, TypeVar) for param in params):
+ raise TypeError('Type parameters should be placed on typing.Generic, not GenericModel')
+ if not hasattr(cls, '__parameters__'):
+ raise TypeError(f'Type {cls.__name__} must inherit from typing.Generic before being parameterized')
+
+ check_parameters_count(cls, params)
+ # Build map from generic typevars to passed params
+ typevars_map: Dict[TypeVarType, Type[Any]] = dict(zip(cls.__parameters__, params))
+ if all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map:
+ return cls # if arguments are equal to parameters it's the same object
+
+ # Create new model with original model as parent inserting fields with DeferredType.
+ model_name = cls.__concrete_name__(params)
+ validators = gather_all_validators(cls)
+
+ type_hints = get_all_type_hints(cls).items()
+ instance_type_hints = {k: v for k, v in type_hints if get_origin(v) is not ClassVar}
+
+ fields = {k: (DeferredType(), cls.__fields__[k].field_info) for k in instance_type_hints if k in cls.__fields__}
+
+ model_module, called_globally = get_caller_frame_info()
+ created_model = cast(
+ Type[GenericModel], # casting ensures mypy is aware of the __concrete__ and __parameters__ attributes
+ create_model(
+ model_name,
+ __module__=model_module or cls.__module__,
+ __base__=(cls,) + tuple(cls.__parameterized_bases__(typevars_map)),
+ __config__=None,
+ __validators__=validators,
+ __cls_kwargs__=None,
+ **fields,
+ ),
+ )
+
+ _assigned_parameters[created_model] = typevars_map
+
+ if called_globally: # create global reference and therefore allow pickling
+ object_by_reference = None
+ reference_name = model_name
+ reference_module_globals = sys.modules[created_model.__module__].__dict__
+ while object_by_reference is not created_model:
+ object_by_reference = reference_module_globals.setdefault(reference_name, created_model)
+ reference_name += '_'
+
+ created_model.Config = cls.Config
+
+ # Find any typevars that are still present in the model.
+ # If none are left, the model is fully "concrete", otherwise the new
+ # class is a generic class as well taking the found typevars as
+ # parameters.
+ new_params = tuple(
+ {param: None for param in iter_contained_typevars(typevars_map.values())}
+ ) # use dict as ordered set
+ created_model.__concrete__ = not new_params
+ if new_params:
+ created_model.__parameters__ = new_params
+
+ # Save created model in cache so we don't end up creating duplicate
+ # models that should be identical.
+ _generic_types_cache[(cls, params)] = created_model
+ if len(params) == 1:
+ _generic_types_cache[(cls, params[0])] = created_model
+
+ # Recursively walk class type hints and replace generic typevars
+ # with concrete types that were passed.
+ _prepare_model_fields(created_model, fields, instance_type_hints, typevars_map)
+
+ return created_model
+
+ @classmethod
+ def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str:
+ """Compute class name for child classes.
+
+ :param params: Tuple of types the class is parameterized with. Given a generic class
+ `Model` with 2 type variables and a concrete model `Model[str, int]`,
+ the value `(str, int)` would be passed to `params`.
+ :return: String representing the new class where `params` are
+ passed to `cls` as type variables.
+
+ This method can be overridden to achieve a custom naming scheme for GenericModels.
+ """
+ param_names = [display_as_type(param) for param in params]
+ params_component = ', '.join(param_names)
+ return f'{cls.__name__}[{params_component}]'
+
+ @classmethod
+ def __parameterized_bases__(cls, typevars_map: Parametrization) -> Iterator[Type[Any]]:
+ """
+ Returns unbound bases of cls parameterised to given type variables.
+
+ :param typevars_map: Dictionary of type applications for binding subclasses.
+ Given a generic class `Model` with 2 type variables [S, T]
+ and a concrete model `Model[str, int]`,
+ the value `{S: str, T: int}` would be passed to `typevars_map`.
+ :return: an iterator of generic subclasses, parameterised by `typevars_map`
+ and other assigned parameters of `cls`
+
+ e.g.:
+ ```
+ class A(GenericModel, Generic[T]):
+ ...
+
+ class B(A[V], Generic[V]):
+ ...
+
+ assert A[int] in B.__parameterized_bases__({V: int})
+ ```
+ """
+
+ def build_base_model(
+ base_model: Type[GenericModel], mapped_types: Parametrization
+ ) -> Iterator[Type[GenericModel]]:
+ base_parameters = tuple(mapped_types[param] for param in base_model.__parameters__)
+ parameterized_base = base_model.__class_getitem__(base_parameters)
+ if parameterized_base is base_model or parameterized_base is cls:
+ # Avoid duplication in MRO
+ return
+ yield parameterized_base
+
+ for base_model in cls.__bases__:
+ if not issubclass(base_model, GenericModel):
+ # not a class that can be meaningfully parameterized
+ continue
+ elif not getattr(base_model, '__parameters__', None):
+ # base_model is "GenericModel" (and has no __parameters__)
+ # or
+ # base_model is already concrete, and will be included transitively via cls.
+ continue
+ elif cls in _assigned_parameters:
+ if base_model in _assigned_parameters:
+ # cls is partially parameterised but not from base_model
+ # e.g. cls = B[S], base_model = A[S]
+ # B[S][int] should subclass A[int], (and will be transitively via B[int])
+ # but it's not viable to consistently subclass types with arbitrary construction
+ # So don't attempt to include A[S][int]
+ continue
+ else: # base_model not in _assigned_parameters:
+ # cls is partially parameterized, base_model is original generic
+ # e.g. cls = B[str, T], base_model = B[S, T]
+ # Need to determine the mapping for the base_model parameters
+ mapped_types: Parametrization = {
+ key: typevars_map.get(value, value) for key, value in _assigned_parameters[cls].items()
+ }
+ yield from build_base_model(base_model, mapped_types)
+ else:
+ # cls is base generic, so base_class has a distinct base
+ # can construct the Parameterised base model using typevars_map directly
+ yield from build_base_model(base_model, typevars_map)
+
+
+def replace_types(type_: Any, type_map: Mapping[Any, Any]) -> Any:
+ """Return type with all occurrences of `type_map` keys recursively replaced with their values.
+
+ :param type_: Any type, class or generic alias
+ :param type_map: Mapping from `TypeVar` instance to concrete types.
+ :return: New type representing the basic structure of `type_` with all
+ `typevar_map` keys recursively replaced.
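+
+ During generic model parametrization the keys are typically `TypeVar`s; an
+ illustrative sketch (assuming `T = TypeVar('T')` is defined):
+
+ >>> replace_types(List[T], {T: int})
+ List[int]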
+ + >>> replace_types(Tuple[str, Union[List[str], float]], {str: int}) + Tuple[int, Union[List[int], float]] + + """ + if not type_map: + return type_ + + type_args = get_args(type_) + origin_type = get_origin(type_) + + if origin_type is Annotated: + annotated_type, *annotations = type_args + return Annotated[replace_types(annotated_type, type_map), tuple(annotations)] + + # Having type args is a good indicator that this is a typing module + # class instantiation or a generic alias of some sort. + if type_args: + resolved_type_args = tuple(replace_types(arg, type_map) for arg in type_args) + if all_identical(type_args, resolved_type_args): + # If all arguments are the same, there is no need to modify the + # type or create a new object at all + return type_ + if ( + origin_type is not None + and isinstance(type_, typing_base) + and not isinstance(origin_type, typing_base) + and getattr(type_, '_name', None) is not None + ): + # In python < 3.9 generic aliases don't exist so any of these like `list`, + # `type` or `collections.abc.Callable` need to be translated. + # See: https://www.python.org/dev/peps/pep-0585 + origin_type = getattr(typing, type_._name) + assert origin_type is not None + return origin_type[resolved_type_args] + + # We handle pydantic generic models separately as they don't have the same + # semantics as "typing" classes or generic aliases + if not origin_type and lenient_issubclass(type_, GenericModel) and not type_.__concrete__: + type_args = type_.__parameters__ + resolved_type_args = tuple(replace_types(t, type_map) for t in type_args) + if all_identical(type_args, resolved_type_args): + return type_ + return type_[resolved_type_args] + + # Handle special case for typehints that can have lists as arguments. + # `typing.Callable[[int, str], int]` is an example for this. + if isinstance(type_, (List, list)): + resolved_list = list(replace_types(element, type_map) for element in type_) + if all_identical(type_, resolved_list): + return type_ + return resolved_list + + # For JsonWrapperValue, need to handle its inner type to allow correct parsing + # of generic Json arguments like Json[T] + if not origin_type and lenient_issubclass(type_, JsonWrapper): + type_.inner_type = replace_types(type_.inner_type, type_map) + return type_ + + # If all else fails, we try to resolve the type directly and otherwise just + # return the input with no modifications. 
+ return type_map.get(type_, type_) + + +def check_parameters_count(cls: Type[GenericModel], parameters: Tuple[Any, ...]) -> None: + actual = len(parameters) + expected = len(cls.__parameters__) + if actual != expected: + description = 'many' if actual > expected else 'few' + raise TypeError(f'Too {description} parameters for {cls.__name__}; actual {actual}, expected {expected}') + + +DictValues: Type[Any] = {}.values().__class__ + + +def iter_contained_typevars(v: Any) -> Iterator[TypeVarType]: + """Recursively iterate through all subtypes and type args of `v` and yield any typevars that are found.""" + if isinstance(v, TypeVar): + yield v + elif hasattr(v, '__parameters__') and not get_origin(v) and lenient_issubclass(v, GenericModel): + yield from v.__parameters__ + elif isinstance(v, (DictValues, list)): + for var in v: + yield from iter_contained_typevars(var) + else: + args = get_args(v) + for arg in args: + yield from iter_contained_typevars(arg) + + +def get_caller_frame_info() -> Tuple[Optional[str], bool]: + """ + Used inside a function to check whether it was called globally + + Will only work against non-compiled code, therefore used only in pydantic.generics + + :returns Tuple[module_name, called_globally] + """ + try: + previous_caller_frame = sys._getframe(2) + except ValueError as e: + raise RuntimeError('This function must be used inside another function') from e + except AttributeError: # sys module does not have _getframe function, so there's nothing we can do about it + return None, False + frame_globals = previous_caller_frame.f_globals + return frame_globals.get('__name__'), previous_caller_frame.f_locals is frame_globals + + +def _prepare_model_fields( + created_model: Type[GenericModel], + fields: Mapping[str, Any], + instance_type_hints: Mapping[str, type], + typevars_map: Mapping[Any, type], +) -> None: + """ + Replace DeferredType fields with concrete type hints and prepare them. 
+ """ + + for key, field in created_model.__fields__.items(): + if key not in fields: + assert field.type_.__class__ is not DeferredType + # https://github.com/nedbat/coveragepy/issues/198 + continue # pragma: no cover + + assert field.type_.__class__ is DeferredType, field.type_.__class__ + + field_type_hint = instance_type_hints[key] + concrete_type = replace_types(field_type_hint, typevars_map) + field.type_ = concrete_type + field.outer_type_ = concrete_type + field.prepare() + created_model.__annotations__[key] = concrete_type diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/json.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/json.cp37-win_amd64.pyd new file mode 100644 index 00000000..8ef4479c Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/json.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/json.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/json.py new file mode 100644 index 00000000..b358b850 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/json.py @@ -0,0 +1,112 @@ +import datetime +from collections import deque +from decimal import Decimal +from enum import Enum +from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network +from pathlib import Path +from re import Pattern +from types import GeneratorType +from typing import Any, Callable, Dict, Type, Union +from uuid import UUID + +from .color import Color +from .networks import NameEmail +from .types import SecretBytes, SecretStr + +__all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat' + + +def isoformat(o: Union[datetime.date, datetime.time]) -> str: + return o.isoformat() + + +def decimal_encoder(dec_value: Decimal) -> Union[int, float]: + """ + Encodes a Decimal as int of there's no exponent, otherwise float + + This is useful when we use ConstrainedDecimal to represent Numeric(x,0) + where a integer (but not int typed) is used. Encoding this as a float + results in failed round-tripping between encode and parse. + Our Id type is a prime example of this. 
+ + >>> decimal_encoder(Decimal("1.0")) + 1.0 + + >>> decimal_encoder(Decimal("1")) + 1 + """ + if dec_value.as_tuple().exponent >= 0: + return int(dec_value) + else: + return float(dec_value) + + +ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = { + bytes: lambda o: o.decode(), + Color: str, + datetime.date: isoformat, + datetime.datetime: isoformat, + datetime.time: isoformat, + datetime.timedelta: lambda td: td.total_seconds(), + Decimal: decimal_encoder, + Enum: lambda o: o.value, + frozenset: list, + deque: list, + GeneratorType: list, + IPv4Address: str, + IPv4Interface: str, + IPv4Network: str, + IPv6Address: str, + IPv6Interface: str, + IPv6Network: str, + NameEmail: str, + Path: str, + Pattern: lambda o: o.pattern, + SecretBytes: str, + SecretStr: str, + set: list, + UUID: str, +} + + +def pydantic_encoder(obj: Any) -> Any: + from dataclasses import asdict, is_dataclass + + from .main import BaseModel + + if isinstance(obj, BaseModel): + return obj.dict() + elif is_dataclass(obj): + return asdict(obj) + + # Check the class type and its superclasses for a matching encoder + for base in obj.__class__.__mro__[:-1]: + try: + encoder = ENCODERS_BY_TYPE[base] + except KeyError: + continue + return encoder(obj) + else: # We have exited the for loop without finding a suitable encoder + raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable") + + +def custom_pydantic_encoder(type_encoders: Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any: + # Check the class type and its superclasses for a matching encoder + for base in obj.__class__.__mro__[:-1]: + try: + encoder = type_encoders[base] + except KeyError: + continue + + return encoder(obj) + else: # We have exited the for loop without finding a suitable encoder + return pydantic_encoder(obj) + + +def timedelta_isoformat(td: datetime.timedelta) -> str: + """ + ISO 8601 encoding for Python timedelta object. 
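+
+ An illustrative example (worked by hand from the formula below):
+
+ >>> timedelta_isoformat(datetime.timedelta(days=1, hours=2, minutes=3))
+ 'P1DT2H3M0.000000S'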
+ """ + minutes, seconds = divmod(td.seconds, 60) + hours, minutes = divmod(minutes, 60) + return f'{"-" if td.days < 0 else ""}P{abs(td.days)}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S' diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/main.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/main.cp37-win_amd64.pyd new file mode 100644 index 00000000..f55d6596 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/main.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/main.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/main.py new file mode 100644 index 00000000..69f3b751 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/main.py @@ -0,0 +1,1109 @@ +import warnings +from abc import ABCMeta +from copy import deepcopy +from enum import Enum +from functools import partial +from pathlib import Path +from types import FunctionType, prepare_class, resolve_bases +from typing import ( + TYPE_CHECKING, + AbstractSet, + Any, + Callable, + ClassVar, + Dict, + List, + Mapping, + Optional, + Tuple, + Type, + TypeVar, + Union, + cast, + no_type_check, + overload, +) + +from typing_extensions import dataclass_transform + +from .class_validators import ValidatorGroup, extract_root_validators, extract_validators, inherit_validators +from .config import BaseConfig, Extra, inherit_config, prepare_config +from .error_wrappers import ErrorWrapper, ValidationError +from .errors import ConfigError, DictError, ExtraError, MissingError +from .fields import ( + MAPPING_LIKE_SHAPES, + Field, + FieldInfo, + ModelField, + ModelPrivateAttr, + PrivateAttr, + Undefined, + is_finalvar_with_default_val, +) +from .json import custom_pydantic_encoder, pydantic_encoder +from .parse import Protocol, load_file, load_str_bytes +from .schema import default_ref_template, model_schema +from .types import PyObject, StrBytes +from .typing import ( + AnyCallable, + get_args, + get_origin, + is_classvar, + is_namedtuple, + is_union, + resolve_annotations, + update_model_forward_refs, +) +from .utils import ( + DUNDER_ATTRIBUTES, + ROOT_KEY, + ClassAttribute, + GetterDict, + Representation, + ValueItems, + generate_model_signature, + is_valid_field, + is_valid_private_name, + lenient_issubclass, + sequence_like, + smart_deepcopy, + unique_list, + validate_field_name, +) + +if TYPE_CHECKING: + from inspect import Signature + + from .class_validators import ValidatorListDict + from .types import ModelOrDc + from .typing import ( + AbstractSetIntStr, + AnyClassMethod, + CallableGenerator, + DictAny, + DictStrAny, + MappingIntStrAny, + ReprArgs, + SetStr, + TupleGenerator, + ) + + Model = TypeVar('Model', bound='BaseModel') + +__all__ = 'BaseModel', 'create_model', 'validate_model' + +_T = TypeVar('_T') + + +def validate_custom_root_type(fields: Dict[str, ModelField]) -> None: + if len(fields) > 1: + raise ValueError(f'{ROOT_KEY} cannot be mixed with other fields') + + +def generate_hash_function(frozen: bool) -> Optional[Callable[[Any], int]]: + def hash_function(self_: Any) -> int: + return hash(self_.__class__) + hash(tuple(self_.__dict__.values())) + + return hash_function if frozen else None + + +# If a field is of type `Callable`, its default value should be a function and cannot to ignored. +ANNOTATED_FIELD_UNTOUCHED_TYPES: Tuple[Any, ...] 
= (property, type, classmethod, staticmethod) +# When creating a `BaseModel` instance, we bypass all the methods, properties... added to the model +UNTOUCHED_TYPES: Tuple[Any, ...] = (FunctionType,) + ANNOTATED_FIELD_UNTOUCHED_TYPES +# Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we need to add this extra +# (somewhat hacky) boolean to keep track of whether we've created the `BaseModel` class yet, and therefore whether it's +# safe to refer to it. If it *hasn't* been created, we assume that the `__new__` call we're in the middle of is for +# the `BaseModel` class, since that's defined immediately after the metaclass. +_is_base_model_class_defined = False + + +@dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo)) +class ModelMetaclass(ABCMeta): + @no_type_check # noqa C901 + def __new__(mcs, name, bases, namespace, **kwargs): # noqa C901 + fields: Dict[str, ModelField] = {} + config = BaseConfig + validators: 'ValidatorListDict' = {} + + pre_root_validators, post_root_validators = [], [] + private_attributes: Dict[str, ModelPrivateAttr] = {} + base_private_attributes: Dict[str, ModelPrivateAttr] = {} + slots: SetStr = namespace.get('__slots__', ()) + slots = {slots} if isinstance(slots, str) else set(slots) + class_vars: SetStr = set() + hash_func: Optional[Callable[[Any], int]] = None + + for base in reversed(bases): + if _is_base_model_class_defined and issubclass(base, BaseModel) and base != BaseModel: + fields.update(smart_deepcopy(base.__fields__)) + config = inherit_config(base.__config__, config) + validators = inherit_validators(base.__validators__, validators) + pre_root_validators += base.__pre_root_validators__ + post_root_validators += base.__post_root_validators__ + base_private_attributes.update(base.__private_attributes__) + class_vars.update(base.__class_vars__) + hash_func = base.__hash__ + + resolve_forward_refs = kwargs.pop('__resolve_forward_refs__', True) + allowed_config_kwargs: SetStr = { + key + for key in dir(config) + if not (key.startswith('__') and key.endswith('__')) # skip dunder methods and attributes + } + config_kwargs = {key: kwargs.pop(key) for key in kwargs.keys() & allowed_config_kwargs} + config_from_namespace = namespace.get('Config') + if config_kwargs and config_from_namespace: + raise TypeError('Specifying config in two places is ambiguous, use either Config attribute or class kwargs') + config = inherit_config(config_from_namespace, config, **config_kwargs) + + validators = inherit_validators(extract_validators(namespace), validators) + vg = ValidatorGroup(validators) + + for f in fields.values(): + f.set_config(config) + extra_validators = vg.get_validators(f.name) + if extra_validators: + f.class_validators.update(extra_validators) + # re-run prepare to add extra validators + f.populate_validators() + + prepare_config(config, name) + + untouched_types = ANNOTATED_FIELD_UNTOUCHED_TYPES + + def is_untouched(v: Any) -> bool: + return isinstance(v, untouched_types) or v.__class__.__name__ == 'cython_function_or_method' + + if (namespace.get('__module__'), namespace.get('__qualname__')) != ('pydantic.main', 'BaseModel'): + annotations = resolve_annotations(namespace.get('__annotations__', {}), namespace.get('__module__', None)) + # annotation only fields need to come first in fields + for ann_name, ann_type in annotations.items(): + if is_classvar(ann_type): + class_vars.add(ann_name) + elif is_finalvar_with_default_val(ann_type, namespace.get(ann_name, Undefined)): + 
class_vars.add(ann_name) + elif is_valid_field(ann_name): + validate_field_name(bases, ann_name) + value = namespace.get(ann_name, Undefined) + allowed_types = get_args(ann_type) if is_union(get_origin(ann_type)) else (ann_type,) + if ( + is_untouched(value) + and ann_type != PyObject + and not any( + lenient_issubclass(get_origin(allowed_type), Type) for allowed_type in allowed_types + ) + ): + continue + fields[ann_name] = ModelField.infer( + name=ann_name, + value=value, + annotation=ann_type, + class_validators=vg.get_validators(ann_name), + config=config, + ) + elif ann_name not in namespace and config.underscore_attrs_are_private: + private_attributes[ann_name] = PrivateAttr() + + untouched_types = UNTOUCHED_TYPES + config.keep_untouched + for var_name, value in namespace.items(): + can_be_changed = var_name not in class_vars and not is_untouched(value) + if isinstance(value, ModelPrivateAttr): + if not is_valid_private_name(var_name): + raise NameError( + f'Private attributes "{var_name}" must not be a valid field name; ' + f'Use sunder or dunder names, e. g. "_{var_name}" or "__{var_name}__"' + ) + private_attributes[var_name] = value + elif config.underscore_attrs_are_private and is_valid_private_name(var_name) and can_be_changed: + private_attributes[var_name] = PrivateAttr(default=value) + elif is_valid_field(var_name) and var_name not in annotations and can_be_changed: + validate_field_name(bases, var_name) + inferred = ModelField.infer( + name=var_name, + value=value, + annotation=annotations.get(var_name, Undefined), + class_validators=vg.get_validators(var_name), + config=config, + ) + if var_name in fields: + if lenient_issubclass(inferred.type_, fields[var_name].type_): + inferred.type_ = fields[var_name].type_ + else: + raise TypeError( + f'The type of {name}.{var_name} differs from the new default value; ' + f'if you wish to change the type of this field, please use a type annotation' + ) + fields[var_name] = inferred + + _custom_root_type = ROOT_KEY in fields + if _custom_root_type: + validate_custom_root_type(fields) + vg.check_for_unused() + if config.json_encoders: + json_encoder = partial(custom_pydantic_encoder, config.json_encoders) + else: + json_encoder = pydantic_encoder + pre_rv_new, post_rv_new = extract_root_validators(namespace) + + if hash_func is None: + hash_func = generate_hash_function(config.frozen) + + exclude_from_namespace = fields | private_attributes.keys() | {'__slots__'} + new_namespace = { + '__config__': config, + '__fields__': fields, + '__exclude_fields__': { + name: field.field_info.exclude for name, field in fields.items() if field.field_info.exclude is not None + } + or None, + '__include_fields__': { + name: field.field_info.include for name, field in fields.items() if field.field_info.include is not None + } + or None, + '__validators__': vg.validators, + '__pre_root_validators__': unique_list( + pre_root_validators + pre_rv_new, + name_factory=lambda v: v.__name__, + ), + '__post_root_validators__': unique_list( + post_root_validators + post_rv_new, + name_factory=lambda skip_on_failure_and_v: skip_on_failure_and_v[1].__name__, + ), + '__schema_cache__': {}, + '__json_encoder__': staticmethod(json_encoder), + '__custom_root_type__': _custom_root_type, + '__private_attributes__': {**base_private_attributes, **private_attributes}, + '__slots__': slots | private_attributes.keys(), + '__hash__': hash_func, + '__class_vars__': class_vars, + **{n: v for n, v in namespace.items() if n not in exclude_from_namespace}, + } + + cls = 
super().__new__(mcs, name, bases, new_namespace, **kwargs) + # set __signature__ attr only for model class, but not for its instances + cls.__signature__ = ClassAttribute('__signature__', generate_model_signature(cls.__init__, fields, config)) + if resolve_forward_refs: + cls.__try_update_forward_refs__() + + # preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487 + # for attributes not in `new_namespace` (e.g. private attributes) + for name, obj in namespace.items(): + if name not in new_namespace: + set_name = getattr(obj, '__set_name__', None) + if callable(set_name): + set_name(cls, name) + + return cls + + def __instancecheck__(self, instance: Any) -> bool: + """ + Avoid calling ABC _abc_subclasscheck unless we're pretty sure. + + See #3829 and python/cpython#92810 + """ + return hasattr(instance, '__fields__') and super().__instancecheck__(instance) + + +object_setattr = object.__setattr__ + + +class BaseModel(Representation, metaclass=ModelMetaclass): + if TYPE_CHECKING: + # populated by the metaclass, defined here to help IDEs only + __fields__: ClassVar[Dict[str, ModelField]] = {} + __include_fields__: ClassVar[Optional[Mapping[str, Any]]] = None + __exclude_fields__: ClassVar[Optional[Mapping[str, Any]]] = None + __validators__: ClassVar[Dict[str, AnyCallable]] = {} + __pre_root_validators__: ClassVar[List[AnyCallable]] + __post_root_validators__: ClassVar[List[Tuple[bool, AnyCallable]]] + __config__: ClassVar[Type[BaseConfig]] = BaseConfig + __json_encoder__: ClassVar[Callable[[Any], Any]] = lambda x: x + __schema_cache__: ClassVar['DictAny'] = {} + __custom_root_type__: ClassVar[bool] = False + __signature__: ClassVar['Signature'] + __private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]] + __class_vars__: ClassVar[SetStr] + __fields_set__: ClassVar[SetStr] = set() + + Config = BaseConfig + __slots__ = ('__dict__', '__fields_set__') + __doc__ = '' # Null out the Representation docstring + + def __init__(__pydantic_self__, **data: Any) -> None: + """ + Create a new model by parsing and validating input data from keyword arguments. + + Raises ValidationError if the input data cannot be parsed to form a valid model. 
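+
+ A small sketch (the `User` model is invented for illustration):
+
+     class User(BaseModel):
+         id: int
+         name = 'Jane Doe'
+
+     user = User(id='123')  # '123' is validated and coerced to the int 123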
+ """ + # Uses something other than `self` the first arg to allow "self" as a settable attribute + values, fields_set, validation_error = validate_model(__pydantic_self__.__class__, data) + if validation_error: + raise validation_error + try: + object_setattr(__pydantic_self__, '__dict__', values) + except TypeError as e: + raise TypeError( + 'Model values must be a dict; you may not have returned a dictionary from a root validator' + ) from e + object_setattr(__pydantic_self__, '__fields_set__', fields_set) + __pydantic_self__._init_private_attributes() + + @no_type_check + def __setattr__(self, name, value): # noqa: C901 (ignore complexity) + if name in self.__private_attributes__ or name in DUNDER_ATTRIBUTES: + return object_setattr(self, name, value) + + if self.__config__.extra is not Extra.allow and name not in self.__fields__: + raise ValueError(f'"{self.__class__.__name__}" object has no field "{name}"') + elif not self.__config__.allow_mutation or self.__config__.frozen: + raise TypeError(f'"{self.__class__.__name__}" is immutable and does not support item assignment') + elif name in self.__fields__ and self.__fields__[name].final: + raise TypeError( + f'"{self.__class__.__name__}" object "{name}" field is final and does not support reassignment' + ) + elif self.__config__.validate_assignment: + new_values = {**self.__dict__, name: value} + + for validator in self.__pre_root_validators__: + try: + new_values = validator(self.__class__, new_values) + except (ValueError, TypeError, AssertionError) as exc: + raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], self.__class__) + + known_field = self.__fields__.get(name, None) + if known_field: + # We want to + # - make sure validators are called without the current value for this field inside `values` + # - keep other values (e.g. submodels) untouched (using `BaseModel.dict()` will change them into dicts) + # - keep the order of the fields + if not known_field.field_info.allow_mutation: + raise TypeError(f'"{known_field.name}" has allow_mutation set to False and cannot be assigned') + dict_without_original_value = {k: v for k, v in self.__dict__.items() if k != name} + value, error_ = known_field.validate(value, dict_without_original_value, loc=name, cls=self.__class__) + if error_: + raise ValidationError([error_], self.__class__) + else: + new_values[name] = value + + errors = [] + for skip_on_failure, validator in self.__post_root_validators__: + if skip_on_failure and errors: + continue + try: + new_values = validator(self.__class__, new_values) + except (ValueError, TypeError, AssertionError) as exc: + errors.append(ErrorWrapper(exc, loc=ROOT_KEY)) + if errors: + raise ValidationError(errors, self.__class__) + + # update the whole __dict__ as other values than just `value` + # may be changed (e.g. 
with `root_validator`) + object_setattr(self, '__dict__', new_values) + else: + self.__dict__[name] = value + + self.__fields_set__.add(name) + + def __getstate__(self) -> 'DictAny': + private_attrs = ((k, getattr(self, k, Undefined)) for k in self.__private_attributes__) + return { + '__dict__': self.__dict__, + '__fields_set__': self.__fields_set__, + '__private_attribute_values__': {k: v for k, v in private_attrs if v is not Undefined}, + } + + def __setstate__(self, state: 'DictAny') -> None: + object_setattr(self, '__dict__', state['__dict__']) + object_setattr(self, '__fields_set__', state['__fields_set__']) + for name, value in state.get('__private_attribute_values__', {}).items(): + object_setattr(self, name, value) + + def _init_private_attributes(self) -> None: + for name, private_attr in self.__private_attributes__.items(): + default = private_attr.get_default() + if default is not Undefined: + object_setattr(self, name, default) + + def dict( + self, + *, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + by_alias: bool = False, + skip_defaults: Optional[bool] = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + ) -> 'DictStrAny': + """ + Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + + """ + if skip_defaults is not None: + warnings.warn( + f'{self.__class__.__name__}.dict(): "skip_defaults" is deprecated and replaced by "exclude_unset"', + DeprecationWarning, + ) + exclude_unset = skip_defaults + + return dict( + self._iter( + to_dict=True, + by_alias=by_alias, + include=include, + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + ) + + def json( + self, + *, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + by_alias: bool = False, + skip_defaults: Optional[bool] = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + encoder: Optional[Callable[[Any], Any]] = None, + models_as_dict: bool = True, + **dumps_kwargs: Any, + ) -> str: + """ + Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`. + + `encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`. + """ + if skip_defaults is not None: + warnings.warn( + f'{self.__class__.__name__}.json(): "skip_defaults" is deprecated and replaced by "exclude_unset"', + DeprecationWarning, + ) + exclude_unset = skip_defaults + encoder = cast(Callable[[Any], Any], encoder or self.__json_encoder__) + + # We don't directly call `self.dict()`, which does exactly this with `to_dict=True` + # because we want to be able to keep raw `BaseModel` instances and not as `dict`. + # This allows users to write custom JSON encoders for given `BaseModel` classes. 
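+ # For example (hypothetical `Pet` model, not defined in this file): with
+ # `json_encoders = {Pet: lambda p: p.name}` in Config and `models_as_dict=False`,
+ # a nested `Pet` instance reaches that encoder as a model rather than a plain dict.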
+ data = dict( + self._iter( + to_dict=models_as_dict, + by_alias=by_alias, + include=include, + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + ) + if self.__custom_root_type__: + data = data[ROOT_KEY] + return self.__config__.json_dumps(data, default=encoder, **dumps_kwargs) + + @classmethod + def _enforce_dict_if_root(cls, obj: Any) -> Any: + if cls.__custom_root_type__ and ( + not (isinstance(obj, dict) and obj.keys() == {ROOT_KEY}) + or cls.__fields__[ROOT_KEY].shape in MAPPING_LIKE_SHAPES + ): + return {ROOT_KEY: obj} + else: + return obj + + @classmethod + def parse_obj(cls: Type['Model'], obj: Any) -> 'Model': + obj = cls._enforce_dict_if_root(obj) + if not isinstance(obj, dict): + try: + obj = dict(obj) + except (TypeError, ValueError) as e: + exc = TypeError(f'{cls.__name__} expected dict not {obj.__class__.__name__}') + raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls) from e + return cls(**obj) + + @classmethod + def parse_raw( + cls: Type['Model'], + b: StrBytes, + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + ) -> 'Model': + try: + obj = load_str_bytes( + b, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + json_loads=cls.__config__.json_loads, + ) + except (ValueError, TypeError, UnicodeDecodeError) as e: + raise ValidationError([ErrorWrapper(e, loc=ROOT_KEY)], cls) + return cls.parse_obj(obj) + + @classmethod + def parse_file( + cls: Type['Model'], + path: Union[str, Path], + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + ) -> 'Model': + obj = load_file( + path, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + json_loads=cls.__config__.json_loads, + ) + return cls.parse_obj(obj) + + @classmethod + def from_orm(cls: Type['Model'], obj: Any) -> 'Model': + if not cls.__config__.orm_mode: + raise ConfigError('You must have the config attribute orm_mode=True to use from_orm') + obj = {ROOT_KEY: obj} if cls.__custom_root_type__ else cls._decompose_class(obj) + m = cls.__new__(cls) + values, fields_set, validation_error = validate_model(cls, obj) + if validation_error: + raise validation_error + object_setattr(m, '__dict__', values) + object_setattr(m, '__fields_set__', fields_set) + m._init_private_attributes() + return m + + @classmethod + def construct(cls: Type['Model'], _fields_set: Optional['SetStr'] = None, **values: Any) -> 'Model': + """ + Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data. + Default values are respected, but no other validation is performed. 
+ Behaves as if `Config.extra = 'allow'` was set since it adds all passed values + """ + m = cls.__new__(cls) + fields_values: Dict[str, Any] = {} + for name, field in cls.__fields__.items(): + if field.alt_alias and field.alias in values: + fields_values[name] = values[field.alias] + elif name in values: + fields_values[name] = values[name] + elif not field.required: + fields_values[name] = field.get_default() + fields_values.update(values) + object_setattr(m, '__dict__', fields_values) + if _fields_set is None: + _fields_set = set(values.keys()) + object_setattr(m, '__fields_set__', _fields_set) + m._init_private_attributes() + return m + + def _copy_and_set_values(self: 'Model', values: 'DictStrAny', fields_set: 'SetStr', *, deep: bool) -> 'Model': + if deep: + # chances of having empty dict here are quite low for using smart_deepcopy + values = deepcopy(values) + + cls = self.__class__ + m = cls.__new__(cls) + object_setattr(m, '__dict__', values) + object_setattr(m, '__fields_set__', fields_set) + for name in self.__private_attributes__: + value = getattr(self, name, Undefined) + if value is not Undefined: + if deep: + value = deepcopy(value) + object_setattr(m, name, value) + + return m + + def copy( + self: 'Model', + *, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + update: Optional['DictStrAny'] = None, + deep: bool = False, + ) -> 'Model': + """ + Duplicate a model, optionally choose which fields to include, exclude and change. + + :param include: fields to include in new model + :param exclude: fields to exclude from new model, as with values this takes precedence over include + :param update: values to change/add in the new model. Note: the data is not validated before creating + the new model: you should trust this data + :param deep: set to `True` to make a deep copy of the model + :return: new model instance + """ + + values = dict( + self._iter(to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False), + **(update or {}), + ) + + # new `__fields_set__` can have unset optional fields with a set value in `update` kwarg + if update: + fields_set = self.__fields_set__ | update.keys() + else: + fields_set = set(self.__fields_set__) + + return self._copy_and_set_values(values, fields_set, deep=deep) + + @classmethod + def schema(cls, by_alias: bool = True, ref_template: str = default_ref_template) -> 'DictStrAny': + cached = cls.__schema_cache__.get((by_alias, ref_template)) + if cached is not None: + return cached + s = model_schema(cls, by_alias=by_alias, ref_template=ref_template) + cls.__schema_cache__[(by_alias, ref_template)] = s + return s + + @classmethod + def schema_json( + cls, *, by_alias: bool = True, ref_template: str = default_ref_template, **dumps_kwargs: Any + ) -> str: + from .json import pydantic_encoder + + return cls.__config__.json_dumps( + cls.schema(by_alias=by_alias, ref_template=ref_template), default=pydantic_encoder, **dumps_kwargs + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls: Type['Model'], value: Any) -> 'Model': + if isinstance(value, cls): + copy_on_model_validation = cls.__config__.copy_on_model_validation + # whether to deep or shallow copy the model on validation, None means do not copy + deep_copy: Optional[bool] = None + if copy_on_model_validation not in {'deep', 'shallow', 'none'}: + # Warn about deprecated behavior + 
warnings.warn( + "`copy_on_model_validation` should be a string: 'deep', 'shallow' or 'none'", DeprecationWarning + ) + if copy_on_model_validation: + deep_copy = False + + if copy_on_model_validation == 'shallow': + # shallow copy + deep_copy = False + elif copy_on_model_validation == 'deep': + # deep copy + deep_copy = True + + if deep_copy is None: + return value + else: + return value._copy_and_set_values(value.__dict__, value.__fields_set__, deep=deep_copy) + + value = cls._enforce_dict_if_root(value) + + if isinstance(value, dict): + return cls(**value) + elif cls.__config__.orm_mode: + return cls.from_orm(value) + else: + try: + value_as_dict = dict(value) + except (TypeError, ValueError) as e: + raise DictError() from e + return cls(**value_as_dict) + + @classmethod + def _decompose_class(cls: Type['Model'], obj: Any) -> GetterDict: + if isinstance(obj, GetterDict): + return obj + return cls.__config__.getter_dict(obj) + + @classmethod + @no_type_check + def _get_value( + cls, + v: Any, + to_dict: bool, + by_alias: bool, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']], + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']], + exclude_unset: bool, + exclude_defaults: bool, + exclude_none: bool, + ) -> Any: + + if isinstance(v, BaseModel): + if to_dict: + v_dict = v.dict( + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=include, + exclude=exclude, + exclude_none=exclude_none, + ) + if ROOT_KEY in v_dict: + return v_dict[ROOT_KEY] + return v_dict + else: + return v.copy(include=include, exclude=exclude) + + value_exclude = ValueItems(v, exclude) if exclude else None + value_include = ValueItems(v, include) if include else None + + if isinstance(v, dict): + return { + k_: cls._get_value( + v_, + to_dict=to_dict, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=value_include and value_include.for_element(k_), + exclude=value_exclude and value_exclude.for_element(k_), + exclude_none=exclude_none, + ) + for k_, v_ in v.items() + if (not value_exclude or not value_exclude.is_excluded(k_)) + and (not value_include or value_include.is_included(k_)) + } + + elif sequence_like(v): + seq_args = ( + cls._get_value( + v_, + to_dict=to_dict, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=value_include and value_include.for_element(i), + exclude=value_exclude and value_exclude.for_element(i), + exclude_none=exclude_none, + ) + for i, v_ in enumerate(v) + if (not value_exclude or not value_exclude.is_excluded(i)) + and (not value_include or value_include.is_included(i)) + ) + + return v.__class__(*seq_args) if is_namedtuple(v.__class__) else v.__class__(seq_args) + + elif isinstance(v, Enum) and getattr(cls.Config, 'use_enum_values', False): + return v.value + + else: + return v + + @classmethod + def __try_update_forward_refs__(cls, **localns: Any) -> None: + """ + Same as update_forward_refs but will not raise exception + when forward references are not defined. + """ + update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns, (NameError,)) + + @classmethod + def update_forward_refs(cls, **localns: Any) -> None: + """ + Try to update ForwardRefs on fields based on this Model, globalns and localns. 
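+
+ Typical pattern for a self-referencing model (names invented for illustration):
+
+     class Node(BaseModel):
+         value: int
+         children: List['Node'] = []
+
+     Node.update_forward_refs()  # resolves the 'Node' string annotation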
+ """ + update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns) + + def __iter__(self) -> 'TupleGenerator': + """ + so `dict(model)` works + """ + yield from self.__dict__.items() + + def _iter( + self, + to_dict: bool = False, + by_alias: bool = False, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + ) -> 'TupleGenerator': + + # Merge field set excludes with explicit exclude parameter with explicit overriding field set options. + # The extra "is not None" guards are not logically necessary but optimizes performance for the simple case. + if exclude is not None or self.__exclude_fields__ is not None: + exclude = ValueItems.merge(self.__exclude_fields__, exclude) + + if include is not None or self.__include_fields__ is not None: + include = ValueItems.merge(self.__include_fields__, include, intersect=True) + + allowed_keys = self._calculate_keys( + include=include, exclude=exclude, exclude_unset=exclude_unset # type: ignore + ) + if allowed_keys is None and not (to_dict or by_alias or exclude_unset or exclude_defaults or exclude_none): + # huge boost for plain _iter() + yield from self.__dict__.items() + return + + value_exclude = ValueItems(self, exclude) if exclude is not None else None + value_include = ValueItems(self, include) if include is not None else None + + for field_key, v in self.__dict__.items(): + if (allowed_keys is not None and field_key not in allowed_keys) or (exclude_none and v is None): + continue + + if exclude_defaults: + model_field = self.__fields__.get(field_key) + if not getattr(model_field, 'required', True) and getattr(model_field, 'default', _missing) == v: + continue + + if by_alias and field_key in self.__fields__: + dict_key = self.__fields__[field_key].alias + else: + dict_key = field_key + + if to_dict or value_include or value_exclude: + v = self._get_value( + v, + to_dict=to_dict, + by_alias=by_alias, + include=value_include and value_include.for_element(field_key), + exclude=value_exclude and value_exclude.for_element(field_key), + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + yield dict_key, v + + def _calculate_keys( + self, + include: Optional['MappingIntStrAny'], + exclude: Optional['MappingIntStrAny'], + exclude_unset: bool, + update: Optional['DictStrAny'] = None, + ) -> Optional[AbstractSet[str]]: + if include is None and exclude is None and exclude_unset is False: + return None + + keys: AbstractSet[str] + if exclude_unset: + keys = self.__fields_set__.copy() + else: + keys = self.__dict__.keys() + + if include is not None: + keys &= include.keys() + + if update: + keys -= update.keys() + + if exclude: + keys -= {k for k, v in exclude.items() if ValueItems.is_true(v)} + + return keys + + def __eq__(self, other: Any) -> bool: + if isinstance(other, BaseModel): + return self.dict() == other.dict() + else: + return self.dict() == other + + def __repr_args__(self) -> 'ReprArgs': + return [ + (k, v) + for k, v in self.__dict__.items() + if k not in DUNDER_ATTRIBUTES and (k not in self.__fields__ or self.__fields__[k].field_info.repr) + ] + + +_is_base_model_class_defined = True + + +@overload +def create_model( + __model_name: str, + *, + __config__: Optional[Type[BaseConfig]] = None, + __base__: None = None, + __module__: str = __name__, + __validators__: 
Dict[str, 'AnyClassMethod'] = None,
+ __cls_kwargs__: Dict[str, Any] = None,
+ **field_definitions: Any,
+) -> Type['BaseModel']:
+ ...
+
+
+@overload
+def create_model(
+ __model_name: str,
+ *,
+ __config__: Optional[Type[BaseConfig]] = None,
+ __base__: Union[Type['Model'], Tuple[Type['Model'], ...]],
+ __module__: str = __name__,
+ __validators__: Dict[str, 'AnyClassMethod'] = None,
+ __cls_kwargs__: Dict[str, Any] = None,
+ **field_definitions: Any,
+) -> Type['Model']:
+ ...
+
+
+def create_model(
+ __model_name: str,
+ *,
+ __config__: Optional[Type[BaseConfig]] = None,
+ __base__: Union[None, Type['Model'], Tuple[Type['Model'], ...]] = None,
+ __module__: str = __name__,
+ __validators__: Dict[str, 'AnyClassMethod'] = None,
+ __cls_kwargs__: Dict[str, Any] = None,
+ __slots__: Optional[Tuple[str, ...]] = None,
+ **field_definitions: Any,
+) -> Type['Model']:
+ """
+ Dynamically create a model.
+ :param __model_name: name of the created model
+ :param __config__: config class to use for the new model
+ :param __base__: base class for the new model to inherit from
+ :param __module__: module of the created model
+ :param __validators__: a dict of method names and @validator class methods
+ :param __cls_kwargs__: a dict for class creation
+ :param __slots__: Deprecated, `__slots__` should not be passed to `create_model`
+ :param field_definitions: fields of the model (or extra fields if a base is supplied)
+ in the format `<name>=(<type>, <default value>)` or `<name>=<default value>`, e.g.
+ `foobar=(str, ...)` or `foobar=123`, or, for complex use-cases, in the format
+ `<name>=<Field>` or `<name>=(<type>, <FieldInfo>)`, e.g.
+ `foo=Field(datetime, default_factory=datetime.utcnow, alias='bar')` or
+ `foo=(str, FieldInfo(title='Foo'))`
+ """
+ if __slots__ is not None:
+ # __slots__ will be ignored from here on
+ warnings.warn('__slots__ should not be passed to create_model', RuntimeWarning)
+
+ if __base__ is not None:
+ if __config__ is not None:
+ raise ConfigError('to avoid confusion __config__ and __base__ cannot be used together')
+ if not isinstance(__base__, tuple):
+ __base__ = (__base__,)
+ else:
+ __base__ = (cast(Type['Model'], BaseModel),)
+
+ __cls_kwargs__ = __cls_kwargs__ or {}
+
+ fields = {}
+ annotations = {}
+
+ for f_name, f_def in field_definitions.items():
+ if not is_valid_field(f_name):
+ warnings.warn(f'fields may not start with an underscore, ignoring "{f_name}"', RuntimeWarning)
+ if isinstance(f_def, tuple):
+ try:
+ f_annotation, f_value = f_def
+ except ValueError as e:
+ raise ConfigError(
+ 'field definitions should either be a tuple of (<type>, <default>) or just a '
+ 'default value, unfortunately this means tuples as '
+ 'default values are not allowed'
+ ) from e
+ else:
+ f_annotation, f_value = None, f_def
+
+ if f_annotation:
+ annotations[f_name] = f_annotation
+ fields[f_name] = f_value
+
+ namespace: 'DictStrAny' = {'__annotations__': annotations, '__module__': __module__}
+ if __validators__:
+ namespace.update(__validators__)
+ namespace.update(fields)
+ if __config__:
+ namespace['Config'] = inherit_config(__config__, BaseConfig)
+ resolved_bases = resolve_bases(__base__)
+ meta, ns, kwds = prepare_class(__model_name, resolved_bases, kwds=__cls_kwargs__)
+ if resolved_bases is not __base__:
+ ns['__orig_bases__'] = __base__
+ namespace.update(ns)
+ return meta(__model_name, resolved_bases, namespace, **kwds)
+
+
+_missing = object()
+
+
+def validate_model( # noqa: C901 (ignore complexity)
+ model: Type[BaseModel], input_data: 'DictStrAny', cls: 'ModelOrDc' = None
+) -> Tuple['DictStrAny', 'SetStr', Optional[ValidationError]]:
+ """
+
validate data against a model. + """ + values = {} + errors = [] + # input_data names, possibly alias + names_used = set() + # field names, never aliases + fields_set = set() + config = model.__config__ + check_extra = config.extra is not Extra.ignore + cls_ = cls or model + + for validator in model.__pre_root_validators__: + try: + input_data = validator(cls_, input_data) + except (ValueError, TypeError, AssertionError) as exc: + return {}, set(), ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls_) + + for name, field in model.__fields__.items(): + value = input_data.get(field.alias, _missing) + using_name = False + if value is _missing and config.allow_population_by_field_name and field.alt_alias: + value = input_data.get(field.name, _missing) + using_name = True + + if value is _missing: + if field.required: + errors.append(ErrorWrapper(MissingError(), loc=field.alias)) + continue + + value = field.get_default() + + if not config.validate_all and not field.validate_always: + values[name] = value + continue + else: + fields_set.add(name) + if check_extra: + names_used.add(field.name if using_name else field.alias) + + v_, errors_ = field.validate(value, values, loc=field.alias, cls=cls_) + if isinstance(errors_, ErrorWrapper): + errors.append(errors_) + elif isinstance(errors_, list): + errors.extend(errors_) + else: + values[name] = v_ + + if check_extra: + if isinstance(input_data, GetterDict): + extra = input_data.extra_keys() - names_used + else: + extra = input_data.keys() - names_used + if extra: + fields_set |= extra + if config.extra is Extra.allow: + for f in extra: + values[f] = input_data[f] + else: + for f in sorted(extra): + errors.append(ErrorWrapper(ExtraError(), loc=f)) + + for skip_on_failure, validator in model.__post_root_validators__: + if skip_on_failure and errors: + continue + try: + values = validator(cls_, values) + except (ValueError, TypeError, AssertionError) as exc: + errors.append(ErrorWrapper(exc, loc=ROOT_KEY)) + + if errors: + return values, fields_set, ValidationError(errors, cls_) + else: + return values, fields_set, None diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/mypy.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/mypy.cp37-win_amd64.pyd new file mode 100644 index 00000000..4c2adf0f Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/mypy.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/mypy.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/mypy.py new file mode 100644 index 00000000..26192590 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/mypy.py @@ -0,0 +1,837 @@ +from configparser import ConfigParser +from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type as TypingType, Union + +from mypy.errorcodes import ErrorCode +from mypy.nodes import ( + ARG_NAMED, + ARG_NAMED_OPT, + ARG_OPT, + ARG_POS, + ARG_STAR2, + MDEF, + Argument, + AssignmentStmt, + Block, + CallExpr, + ClassDef, + Context, + Decorator, + EllipsisExpr, + FuncBase, + FuncDef, + JsonDict, + MemberExpr, + NameExpr, + PassStmt, + PlaceholderNode, + RefExpr, + StrExpr, + SymbolNode, + SymbolTableNode, + TempNode, + TypeInfo, + TypeVarExpr, + Var, +) +from mypy.options import Options +from mypy.plugin import ( + CheckerPluginInterface, + ClassDefContext, + FunctionContext, + MethodContext, + Plugin, + SemanticAnalyzerPluginInterface, 
+) +from mypy.plugins import dataclasses +from mypy.semanal import set_callable_name # type: ignore +from mypy.server.trigger import make_wildcard_trigger +from mypy.types import ( + AnyType, + CallableType, + Instance, + NoneType, + Overloaded, + Type, + TypeOfAny, + TypeType, + TypeVarType, + UnionType, + get_proper_type, +) +from mypy.typevars import fill_typevars +from mypy.util import get_unique_redefinition_name +from mypy.version import __version__ as mypy_version + +from pydantic.utils import is_valid_field + +try: + from mypy.types import TypeVarDef # type: ignore[attr-defined] +except ImportError: # pragma: no cover + # Backward-compatible with TypeVarDef from Mypy 0.910. + from mypy.types import TypeVarType as TypeVarDef + +CONFIGFILE_KEY = 'pydantic-mypy' +METADATA_KEY = 'pydantic-mypy-metadata' +BASEMODEL_FULLNAME = 'pydantic.main.BaseModel' +BASESETTINGS_FULLNAME = 'pydantic.env_settings.BaseSettings' +FIELD_FULLNAME = 'pydantic.fields.Field' +DATACLASS_FULLNAME = 'pydantic.dataclasses.dataclass' + + +def parse_mypy_version(version: str) -> Tuple[int, ...]: + return tuple(int(part) for part in version.split('+', 1)[0].split('.')) + + +BUILTINS_NAME = 'builtins' if parse_mypy_version(mypy_version) >= (0, 930) else '__builtins__' + + +def plugin(version: str) -> 'TypingType[Plugin]': + """ + `version` is the mypy version string + + We might want to use this to print a warning if the mypy version being used is + newer, or especially older, than we expect (or need). + """ + return PydanticPlugin + + +class PydanticPlugin(Plugin): + def __init__(self, options: Options) -> None: + self.plugin_config = PydanticPluginConfig(options) + super().__init__(options) + + def get_base_class_hook(self, fullname: str) -> 'Optional[Callable[[ClassDefContext], None]]': + sym = self.lookup_fully_qualified(fullname) + if sym and isinstance(sym.node, TypeInfo): # pragma: no branch + # No branching may occur if the mypy cache has not been cleared + if any(get_fullname(base) == BASEMODEL_FULLNAME for base in sym.node.mro): + return self._pydantic_model_class_maker_callback + return None + + def get_function_hook(self, fullname: str) -> 'Optional[Callable[[FunctionContext], Type]]': + sym = self.lookup_fully_qualified(fullname) + if sym and sym.fullname == FIELD_FULLNAME: + return self._pydantic_field_callback + return None + + def get_method_hook(self, fullname: str) -> Optional[Callable[[MethodContext], Type]]: + if fullname.endswith('.from_orm'): + return from_orm_callback + return None + + def get_class_decorator_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]: + if fullname == DATACLASS_FULLNAME: + return dataclasses.dataclass_class_maker_callback # type: ignore[return-value] + return None + + def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None: + transformer = PydanticModelTransformer(ctx, self.plugin_config) + transformer.transform() + + def _pydantic_field_callback(self, ctx: FunctionContext) -> 'Type': + """ + Extract the type of the `default` argument from the Field function, and use it as the return type. + + In particular: + * Check whether the default and default_factory argument is specified. + * Output an error if both are specified. + * Retrieve the type of the argument which is specified, and use it as return type for the function. 
+ """ + default_any_type = ctx.default_return_type + + assert ctx.callee_arg_names[0] == 'default', '"default" is no longer first argument in Field()' + assert ctx.callee_arg_names[1] == 'default_factory', '"default_factory" is no longer second argument in Field()' + default_args = ctx.args[0] + default_factory_args = ctx.args[1] + + if default_args and default_factory_args: + error_default_and_default_factory_specified(ctx.api, ctx.context) + return default_any_type + + if default_args: + default_type = ctx.arg_types[0][0] + default_arg = default_args[0] + + # Fallback to default Any type if the field is required + if not isinstance(default_arg, EllipsisExpr): + return default_type + + elif default_factory_args: + default_factory_type = ctx.arg_types[1][0] + + # Functions which use `ParamSpec` can be overloaded, exposing the callable's types as a parameter + # Pydantic calls the default factory without any argument, so we retrieve the first item + if isinstance(default_factory_type, Overloaded): + if float(mypy_version) > 0.910: + default_factory_type = default_factory_type.items[0] + else: + # Mypy0.910 exposes the items of overloaded types in a function + default_factory_type = default_factory_type.items()[0] # type: ignore[operator] + + if isinstance(default_factory_type, CallableType): + return default_factory_type.ret_type + + return default_any_type + + +class PydanticPluginConfig: + __slots__ = ('init_forbid_extra', 'init_typed', 'warn_required_dynamic_aliases', 'warn_untyped_fields') + init_forbid_extra: bool + init_typed: bool + warn_required_dynamic_aliases: bool + warn_untyped_fields: bool + + def __init__(self, options: Options) -> None: + if options.config_file is None: # pragma: no cover + return + + toml_config = parse_toml(options.config_file) + if toml_config is not None: + config = toml_config.get('tool', {}).get('pydantic-mypy', {}) + for key in self.__slots__: + setting = config.get(key, False) + if not isinstance(setting, bool): + raise ValueError(f'Configuration value must be a boolean for key: {key}') + setattr(self, key, setting) + else: + plugin_config = ConfigParser() + plugin_config.read(options.config_file) + for key in self.__slots__: + setting = plugin_config.getboolean(CONFIGFILE_KEY, key, fallback=False) + setattr(self, key, setting) + + +def from_orm_callback(ctx: MethodContext) -> Type: + """ + Raise an error if orm_mode is not enabled + """ + model_type: Instance + if isinstance(ctx.type, CallableType) and isinstance(ctx.type.ret_type, Instance): + model_type = ctx.type.ret_type # called on the class + elif isinstance(ctx.type, Instance): + model_type = ctx.type # called on an instance (unusual, but still valid) + else: # pragma: no cover + detail = f'ctx.type: {ctx.type} (of type {ctx.type.__class__.__name__})' + error_unexpected_behavior(detail, ctx.api, ctx.context) + return ctx.default_return_type + pydantic_metadata = model_type.type.metadata.get(METADATA_KEY) + if pydantic_metadata is None: + return ctx.default_return_type + orm_mode = pydantic_metadata.get('config', {}).get('orm_mode') + if orm_mode is not True: + error_from_orm(get_name(model_type.type), ctx.api, ctx.context) + return ctx.default_return_type + + +class PydanticModelTransformer: + tracked_config_fields: Set[str] = { + 'extra', + 'allow_mutation', + 'frozen', + 'orm_mode', + 'allow_population_by_field_name', + 'alias_generator', + } + + def __init__(self, ctx: ClassDefContext, plugin_config: PydanticPluginConfig) -> None: + self._ctx = ctx + self.plugin_config = plugin_config + + 
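For context, the four PydanticPluginConfig switches above are read from the `[pydantic-mypy]` section of the mypy config file (or `[tool.pydantic-mypy]` in TOML). A minimal, self-contained sketch of the INI fallback path — the file contents here are hypothetical; only the section and key names come from the code above:

from configparser import ConfigParser

# Hypothetical mypy.ini; `plugins = pydantic.mypy` activates the plugin,
# and the CONFIGFILE_KEY section carries the plugin switches.
INI = '''
[mypy]
plugins = pydantic.mypy

[pydantic-mypy]
init_typed = True
warn_untyped_fields = True
'''

parser = ConfigParser()
parser.read_string(INI)
for key in ('init_forbid_extra', 'init_typed', 'warn_required_dynamic_aliases', 'warn_untyped_fields'):
    # Missing keys fall back to False, mirroring the getboolean(..., fallback=False) call above.
    print(key, parser.getboolean('pydantic-mypy', key, fallback=False))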
def transform(self) -> None: + """ + Configures the BaseModel subclass according to the plugin settings. + + In particular: + * determines the model config and fields, + * adds a fields-aware signature for the initializer and construct methods + * freezes the class if allow_mutation = False or frozen = True + * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses + """ + ctx = self._ctx + info = self._ctx.cls.info + + self.adjust_validator_signatures() + config = self.collect_config() + fields = self.collect_fields(config) + for field in fields: + if info[field.name].type is None: + if not ctx.api.final_iteration: + ctx.api.defer() + is_settings = any(get_fullname(base) == BASESETTINGS_FULLNAME for base in info.mro[:-1]) + self.add_initializer(fields, config, is_settings) + self.add_construct_method(fields) + self.set_frozen(fields, frozen=config.allow_mutation is False or config.frozen is True) + info.metadata[METADATA_KEY] = { + 'fields': {field.name: field.serialize() for field in fields}, + 'config': config.set_values_dict(), + } + + def adjust_validator_signatures(self) -> None: + """When we decorate a function `f` with `pydantic.validator(...), mypy sees + `f` as a regular method taking a `self` instance, even though pydantic + internally wraps `f` with `classmethod` if necessary. + + Teach mypy this by marking any function whose outermost decorator is a + `validator()` call as a classmethod. + """ + for name, sym in self._ctx.cls.info.names.items(): + if isinstance(sym.node, Decorator): + first_dec = sym.node.original_decorators[0] + if ( + isinstance(first_dec, CallExpr) + and isinstance(first_dec.callee, NameExpr) + and first_dec.callee.fullname == 'pydantic.class_validators.validator' + ): + sym.node.func.is_class = True + + def collect_config(self) -> 'ModelConfigData': + """ + Collects the values of the config attributes that are used by the plugin, accounting for parent classes. + """ + ctx = self._ctx + cls = ctx.cls + config = ModelConfigData() + for stmt in cls.defs.body: + if not isinstance(stmt, ClassDef): + continue + if stmt.name == 'Config': + for substmt in stmt.defs.body: + if not isinstance(substmt, AssignmentStmt): + continue + config.update(self.get_config_update(substmt)) + if ( + config.has_alias_generator + and not config.allow_population_by_field_name + and self.plugin_config.warn_required_dynamic_aliases + ): + error_required_dynamic_aliases(ctx.api, stmt) + for info in cls.info.mro[1:]: # 0 is the current class + if METADATA_KEY not in info.metadata: + continue + + # Each class depends on the set of fields in its ancestors + ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info))) + for name, value in info.metadata[METADATA_KEY]['config'].items(): + config.setdefault(name, value) + return config + + def collect_fields(self, model_config: 'ModelConfigData') -> List['PydanticModelField']: + """ + Collects the fields for the model, accounting for parent classes + """ + # First, collect fields belonging to the current class. 
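The `adjust_validator_signatures` hook above exists because pydantic wraps `@validator` methods in `classmethod` at runtime, so their first parameter is `cls`, not `self`. A minimal model (assuming pydantic v1.x, as vendored by this diff) showing the exact pattern the hook marks for mypy:

from pydantic import BaseModel, validator

class Account(BaseModel):
    name: str

    @validator('name')
    def name_not_blank(cls, v):  # `cls`, not `self`: pydantic applies classmethod implicitly
        if not v.strip():
            raise ValueError('name must not be blank')
        return v

print(Account(name='ok'))  # name='ok'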
+ ctx = self._ctx + cls = self._ctx.cls + fields = [] # type: List[PydanticModelField] + known_fields = set() # type: Set[str] + for stmt in cls.defs.body: + if not isinstance(stmt, AssignmentStmt): # `and stmt.new_syntax` to require annotation + continue + + lhs = stmt.lvalues[0] + if not isinstance(lhs, NameExpr) or not is_valid_field(lhs.name): + continue + + if not stmt.new_syntax and self.plugin_config.warn_untyped_fields: + error_untyped_fields(ctx.api, stmt) + + # if lhs.name == '__config__': # BaseConfig not well handled; I'm not sure why yet + # continue + + sym = cls.info.names.get(lhs.name) + if sym is None: # pragma: no cover + # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation) + # This is the same logic used in the dataclasses plugin + continue + + node = sym.node + if isinstance(node, PlaceholderNode): # pragma: no cover + # See the PlaceholderNode docstring for more detail about how this can occur + # Basically, it is an edge case when dealing with complex import logic + # This is the same logic used in the dataclasses plugin + continue + if not isinstance(node, Var): # pragma: no cover + # Don't know if this edge case still happens with the `is_valid_field` check above + # but better safe than sorry + continue + + # x: ClassVar[int] is ignored by dataclasses. + if node.is_classvar: + continue + + is_required = self.get_is_required(cls, stmt, lhs) + alias, has_dynamic_alias = self.get_alias_info(stmt) + if ( + has_dynamic_alias + and not model_config.allow_population_by_field_name + and self.plugin_config.warn_required_dynamic_aliases + ): + error_required_dynamic_aliases(ctx.api, stmt) + fields.append( + PydanticModelField( + name=lhs.name, + is_required=is_required, + alias=alias, + has_dynamic_alias=has_dynamic_alias, + line=stmt.line, + column=stmt.column, + ) + ) + known_fields.add(lhs.name) + all_fields = fields.copy() + for info in cls.info.mro[1:]: # 0 is the current class, -2 is BaseModel, -1 is object + if METADATA_KEY not in info.metadata: + continue + + superclass_fields = [] + # Each class depends on the set of fields in its ancestors + ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info))) + + for name, data in info.metadata[METADATA_KEY]['fields'].items(): + if name not in known_fields: + field = PydanticModelField.deserialize(info, data) + known_fields.add(name) + superclass_fields.append(field) + else: + (field,) = (a for a in all_fields if a.name == name) + all_fields.remove(field) + superclass_fields.append(field) + all_fields = superclass_fields + all_fields + return all_fields + + def add_initializer(self, fields: List['PydanticModelField'], config: 'ModelConfigData', is_settings: bool) -> None: + """ + Adds a fields-aware `__init__` method to the class. + + The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings. 
+ """ + ctx = self._ctx + typed = self.plugin_config.init_typed + use_alias = config.allow_population_by_field_name is not True + force_all_optional = is_settings or bool( + config.has_alias_generator and not config.allow_population_by_field_name + ) + init_arguments = self.get_field_arguments( + fields, typed=typed, force_all_optional=force_all_optional, use_alias=use_alias + ) + if not self.should_init_forbid_extra(fields, config): + var = Var('kwargs') + init_arguments.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2)) + + if '__init__' not in ctx.cls.info.names: + add_method(ctx, '__init__', init_arguments, NoneType()) + + def add_construct_method(self, fields: List['PydanticModelField']) -> None: + """ + Adds a fully typed `construct` classmethod to the class. + + Similar to the fields-aware __init__ method, but always uses the field names (not aliases), + and does not treat settings fields as optional. + """ + ctx = self._ctx + set_str = ctx.api.named_type(f'{BUILTINS_NAME}.set', [ctx.api.named_type(f'{BUILTINS_NAME}.str')]) + optional_set_str = UnionType([set_str, NoneType()]) + fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT) + construct_arguments = self.get_field_arguments(fields, typed=True, force_all_optional=False, use_alias=False) + construct_arguments = [fields_set_argument] + construct_arguments + + obj_type = ctx.api.named_type(f'{BUILTINS_NAME}.object') + self_tvar_name = '_PydanticBaseModel' # Make sure it does not conflict with other names in the class + tvar_fullname = ctx.cls.fullname + '.' + self_tvar_name + tvd = TypeVarDef(self_tvar_name, tvar_fullname, -1, [], obj_type) + self_tvar_expr = TypeVarExpr(self_tvar_name, tvar_fullname, [], obj_type) + ctx.cls.info.names[self_tvar_name] = SymbolTableNode(MDEF, self_tvar_expr) + + # Backward-compatible with TypeVarDef from Mypy 0.910. + if isinstance(tvd, TypeVarType): + self_type = tvd + else: + self_type = TypeVarType(tvd) # type: ignore[call-arg] + + add_method( + ctx, + 'construct', + construct_arguments, + return_type=self_type, + self_type=self_type, + tvar_def=tvd, + is_classmethod=True, + ) + + def set_frozen(self, fields: List['PydanticModelField'], frozen: bool) -> None: + """ + Marks all fields as properties so that attempts to set them trigger mypy errors. + + This is the same approach used by the attrs and dataclasses plugins. + """ + info = self._ctx.cls.info + for field in fields: + sym_node = info.names.get(field.name) + if sym_node is not None: + var = sym_node.node + assert isinstance(var, Var) + var.is_property = frozen + else: + var = field.to_var(info, use_alias=False) + var.info = info + var.is_property = frozen + var._fullname = get_fullname(info) + '.' + get_name(var) + info.names[get_name(var)] = SymbolTableNode(MDEF, var) + + def get_config_update(self, substmt: AssignmentStmt) -> Optional['ModelConfigData']: + """ + Determines the config update due to a single statement in the Config class definition. 
+ + Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int) + """ + lhs = substmt.lvalues[0] + if not (isinstance(lhs, NameExpr) and lhs.name in self.tracked_config_fields): + return None + if lhs.name == 'extra': + if isinstance(substmt.rvalue, StrExpr): + forbid_extra = substmt.rvalue.value == 'forbid' + elif isinstance(substmt.rvalue, MemberExpr): + forbid_extra = substmt.rvalue.name == 'forbid' + else: + error_invalid_config_value(lhs.name, self._ctx.api, substmt) + return None + return ModelConfigData(forbid_extra=forbid_extra) + if lhs.name == 'alias_generator': + has_alias_generator = True + if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname == 'builtins.None': + has_alias_generator = False + return ModelConfigData(has_alias_generator=has_alias_generator) + if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname in ('builtins.True', 'builtins.False'): + return ModelConfigData(**{lhs.name: substmt.rvalue.fullname == 'builtins.True'}) + error_invalid_config_value(lhs.name, self._ctx.api, substmt) + return None + + @staticmethod + def get_is_required(cls: ClassDef, stmt: AssignmentStmt, lhs: NameExpr) -> bool: + """ + Returns a boolean indicating whether the field defined in `stmt` is a required field. + """ + expr = stmt.rvalue + if isinstance(expr, TempNode): + # TempNode means annotation-only, so only non-required if Optional + value_type = get_proper_type(cls.info[lhs.name].type) + if isinstance(value_type, UnionType) and any(isinstance(item, NoneType) for item in value_type.items): + # Annotated as Optional, or otherwise having NoneType in the union + return False + return True + if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: + # The "default value" is a call to `Field`; at this point, the field is + # only required if default is Ellipsis (i.e., `field_name: Annotation = Field(...)`) or if default_factory + # is specified. + for arg, name in zip(expr.args, expr.arg_names): + # If name is None, then this arg is the default because it is the only positonal argument. + if name is None or name == 'default': + return arg.__class__ is EllipsisExpr + if name == 'default_factory': + return False + return True + # Only required if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`) + return isinstance(expr, EllipsisExpr) + + @staticmethod + def get_alias_info(stmt: AssignmentStmt) -> Tuple[Optional[str], bool]: + """ + Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`. + + `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal. + If `has_dynamic_alias` is True, `alias` will be None. 
+ """ + expr = stmt.rvalue + if isinstance(expr, TempNode): + # TempNode means annotation-only + return None, False + + if not ( + isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME + ): + # Assigned value is not a call to pydantic.fields.Field + return None, False + + for i, arg_name in enumerate(expr.arg_names): + if arg_name != 'alias': + continue + arg = expr.args[i] + if isinstance(arg, StrExpr): + return arg.value, False + else: + return None, True + return None, False + + def get_field_arguments( + self, fields: List['PydanticModelField'], typed: bool, force_all_optional: bool, use_alias: bool + ) -> List[Argument]: + """ + Helper function used during the construction of the `__init__` and `construct` method signatures. + + Returns a list of mypy Argument instances for use in the generated signatures. + """ + info = self._ctx.cls.info + arguments = [ + field.to_argument(info, typed=typed, force_optional=force_all_optional, use_alias=use_alias) + for field in fields + if not (use_alias and field.has_dynamic_alias) + ] + return arguments + + def should_init_forbid_extra(self, fields: List['PydanticModelField'], config: 'ModelConfigData') -> bool: + """ + Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature + + We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to, + *unless* a required dynamic alias is present (since then we can't determine a valid signature). + """ + if not config.allow_population_by_field_name: + if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)): + return False + if config.forbid_extra: + return True + return self.plugin_config.init_forbid_extra + + @staticmethod + def is_dynamic_alias_present(fields: List['PydanticModelField'], has_alias_generator: bool) -> bool: + """ + Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be + determined during static analysis. 
+ """ + for field in fields: + if field.has_dynamic_alias: + return True + if has_alias_generator: + for field in fields: + if field.alias is None: + return True + return False + + +class PydanticModelField: + def __init__( + self, name: str, is_required: bool, alias: Optional[str], has_dynamic_alias: bool, line: int, column: int + ): + self.name = name + self.is_required = is_required + self.alias = alias + self.has_dynamic_alias = has_dynamic_alias + self.line = line + self.column = column + + def to_var(self, info: TypeInfo, use_alias: bool) -> Var: + name = self.name + if use_alias and self.alias is not None: + name = self.alias + return Var(name, info[self.name].type) + + def to_argument(self, info: TypeInfo, typed: bool, force_optional: bool, use_alias: bool) -> Argument: + if typed and info[self.name].type is not None: + type_annotation = info[self.name].type + else: + type_annotation = AnyType(TypeOfAny.explicit) + return Argument( + variable=self.to_var(info, use_alias), + type_annotation=type_annotation, + initializer=None, + kind=ARG_NAMED_OPT if force_optional or not self.is_required else ARG_NAMED, + ) + + def serialize(self) -> JsonDict: + return self.__dict__ + + @classmethod + def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'PydanticModelField': + return cls(**data) + + +class ModelConfigData: + def __init__( + self, + forbid_extra: Optional[bool] = None, + allow_mutation: Optional[bool] = None, + frozen: Optional[bool] = None, + orm_mode: Optional[bool] = None, + allow_population_by_field_name: Optional[bool] = None, + has_alias_generator: Optional[bool] = None, + ): + self.forbid_extra = forbid_extra + self.allow_mutation = allow_mutation + self.frozen = frozen + self.orm_mode = orm_mode + self.allow_population_by_field_name = allow_population_by_field_name + self.has_alias_generator = has_alias_generator + + def set_values_dict(self) -> Dict[str, Any]: + return {k: v for k, v in self.__dict__.items() if v is not None} + + def update(self, config: Optional['ModelConfigData']) -> None: + if config is None: + return + for k, v in config.set_values_dict().items(): + setattr(self, k, v) + + def setdefault(self, key: str, value: Any) -> None: + if getattr(self, key) is None: + setattr(self, key, value) + + +ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_orm call', 'Pydantic') +ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic') +ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic') +ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic') +ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic') +ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults', 'Pydantic') + + +def error_from_orm(model_name: str, api: CheckerPluginInterface, context: Context) -> None: + api.fail(f'"{model_name}" does not have orm_mode=True', context, code=ERROR_ORM) + + +def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None: + api.fail(f'Invalid value for "Config.{name}"', context, code=ERROR_CONFIG) + + +def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None: + api.fail('Required dynamic aliases disallowed', context, code=ERROR_ALIAS) + + +def error_unexpected_behavior(detail: str, api: CheckerPluginInterface, context: Context) -> None: # pragma: no cover + # Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error 
path + link = 'https://github.com/pydantic/pydantic/issues/new/choose' + full_message = f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n' + full_message += f'Please consider reporting this bug at {link} so we can try to fix it!' + api.fail(full_message, context, code=ERROR_UNEXPECTED) + + +def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None: + api.fail('Untyped fields disallowed', context, code=ERROR_UNTYPED) + + +def error_default_and_default_factory_specified(api: CheckerPluginInterface, context: Context) -> None: + api.fail('Field default and default_factory cannot be specified together', context, code=ERROR_FIELD_DEFAULTS) + + +def add_method( + ctx: ClassDefContext, + name: str, + args: List[Argument], + return_type: Type, + self_type: Optional[Type] = None, + tvar_def: Optional[TypeVarDef] = None, + is_classmethod: bool = False, + is_new: bool = False, + # is_staticmethod: bool = False, +) -> None: + """ + Adds a new method to a class. + + This can be dropped if/when https://github.com/python/mypy/issues/7301 is merged + """ + info = ctx.cls.info + + # First remove any previously generated methods with the same name + # to avoid clashes and problems in the semantic analyzer. + if name in info.names: + sym = info.names[name] + if sym.plugin_generated and isinstance(sym.node, FuncDef): + ctx.cls.defs.body.remove(sym.node) # pragma: no cover + + self_type = self_type or fill_typevars(info) + if is_classmethod or is_new: + first = [Argument(Var('_cls'), TypeType.make_normalized(self_type), None, ARG_POS)] + # elif is_staticmethod: + # first = [] + else: + self_type = self_type or fill_typevars(info) + first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)] + args = first + args + arg_types, arg_names, arg_kinds = [], [], [] + for arg in args: + assert arg.type_annotation, 'All arguments must be fully typed.' + arg_types.append(arg.type_annotation) + arg_names.append(get_name(arg.variable)) + arg_kinds.append(arg.kind) + + function_type = ctx.api.named_type(f'{BUILTINS_NAME}.function') + signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type) + if tvar_def: + signature.variables = [tvar_def] + + func = FuncDef(name, args, Block([PassStmt()])) + func.info = info + func.type = set_callable_name(signature, func) + func.is_class = is_classmethod + # func.is_static = is_staticmethod + func._fullname = get_fullname(info) + '.' + name + func.line = info.line + + # NOTE: we would like the plugin generated node to dominate, but we still + # need to keep any existing definitions so they get semantically analyzed. + if name in info.names: + # Get a nice unique name instead. + r_name = get_unique_redefinition_name(name, info.names) + info.names[r_name] = info.names[name] + + if is_classmethod: # or is_staticmethod: + func.is_decorated = True + v = Var(name, func.type) + v.info = info + v._fullname = func._fullname + # if is_classmethod: + v.is_classmethod = True + dec = Decorator(func, [NameExpr('classmethod')], v) + # else: + # v.is_staticmethod = True + # dec = Decorator(func, [NameExpr('staticmethod')], v) + + dec.line = info.line + sym = SymbolTableNode(MDEF, dec) + else: + sym = SymbolTableNode(MDEF, func) + sym.plugin_generated = True + + info.names[name] = sym + info.defn.defs.body.append(func) + + +def get_fullname(x: Union[FuncBase, SymbolNode]) -> str: + """ + Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped. 
+ """ + fn = x.fullname + if callable(fn): # pragma: no cover + return fn() + return fn + + +def get_name(x: Union[FuncBase, SymbolNode]) -> str: + """ + Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped. + """ + fn = x.name + if callable(fn): # pragma: no cover + return fn() + return fn + + +def parse_toml(config_file: str) -> Optional[Dict[str, Any]]: + if not config_file.endswith('.toml'): + return None + + read_mode = 'rb' + try: + import tomli as toml_ + except ImportError: + # older versions of mypy have toml as a dependency, not tomli + read_mode = 'r' + try: + import toml as toml_ # type: ignore[no-redef] + except ImportError: # pragma: no cover + import warnings + + warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.') + return None + + with open(config_file, read_mode) as rf: + return toml_.load(rf) # type: ignore[arg-type] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/networks.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/networks.cp37-win_amd64.pyd new file mode 100644 index 00000000..33c5f14f Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/networks.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/networks.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/networks.py new file mode 100644 index 00000000..725e4f8a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/networks.py @@ -0,0 +1,745 @@ +import re +from ipaddress import ( + IPv4Address, + IPv4Interface, + IPv4Network, + IPv6Address, + IPv6Interface, + IPv6Network, + _BaseAddress, + _BaseNetwork, +) +from typing import ( + TYPE_CHECKING, + Any, + Collection, + Dict, + Generator, + List, + Match, + Optional, + Pattern, + Set, + Tuple, + Type, + Union, + cast, + no_type_check, +) +from urllib.parse import quote, quote_plus + +from . 
import errors +from .utils import Representation, update_not_none +from .validators import constr_length_validator, str_validator + +if TYPE_CHECKING: + import email_validator + from typing_extensions import TypedDict + + from .config import BaseConfig + from .fields import ModelField + from .typing import AnyCallable + + CallableGenerator = Generator[AnyCallable, None, None] + + class Parts(TypedDict, total=False): + scheme: str + user: Optional[str] + password: Optional[str] + ipv4: Optional[str] + ipv6: Optional[str] + domain: Optional[str] + port: Optional[str] + path: Optional[str] + query: Optional[str] + fragment: Optional[str] + + class HostParts(TypedDict, total=False): + host: str + tld: Optional[str] + host_type: Optional[str] + port: Optional[str] + rebuild: bool + +else: + email_validator = None + + class Parts(dict): + pass + + +NetworkType = Union[str, bytes, int, Tuple[Union[str, bytes, int], Union[str, int]]] + +__all__ = [ + 'AnyUrl', + 'AnyHttpUrl', + 'FileUrl', + 'HttpUrl', + 'stricturl', + 'EmailStr', + 'NameEmail', + 'IPvAnyAddress', + 'IPvAnyInterface', + 'IPvAnyNetwork', + 'PostgresDsn', + 'CockroachDsn', + 'AmqpDsn', + 'RedisDsn', + 'MongoDsn', + 'KafkaDsn', + 'validate_email', +] + +_url_regex_cache = None +_multi_host_url_regex_cache = None +_ascii_domain_regex_cache = None +_int_domain_regex_cache = None +_host_regex_cache = None + +_host_regex = ( + r'(?:' + r'(?P<ipv4>(?:\d{1,3}\.){3}\d{1,3})(?=$|[/:#?])|' # ipv4 + r'(?P<ipv6>\[[A-F0-9]*:[A-F0-9:]+\])(?=$|[/:#?])|' # ipv6 + r'(?P<domain>[^\s/:?#]+)' # domain, validation occurs later + r')?' + r'(?::(?P<port>\d+))?' # port +) +_scheme_regex = r'(?:(?P<scheme>[a-z][a-z0-9+\-.]+)://)?' # scheme https://tools.ietf.org/html/rfc3986#appendix-A +_user_info_regex = r'(?:(?P<user>[^\s:/]*)(?::(?P<password>[^\s/]*))?@)?' +_path_regex = r'(?P<path>/[^\s?#]*)?' +_query_regex = r'(?:\?(?P<query>[^\s#]*))?' +_fragment_regex = r'(?:#(?P<fragment>[^\s#]*))?' + + +def url_regex() -> Pattern[str]: + global _url_regex_cache + if _url_regex_cache is None: + _url_regex_cache = re.compile( + rf'{_scheme_regex}{_user_info_regex}{_host_regex}{_path_regex}{_query_regex}{_fragment_regex}', + re.IGNORECASE, + ) + return _url_regex_cache + + +def multi_host_url_regex() -> Pattern[str]: + """ + Compiled multi host url regex. + + Additionally to `url_regex` it allows to match multiple hosts. + E.g. host1.db.net,host2.db.net + """ + global _multi_host_url_regex_cache + if _multi_host_url_regex_cache is None: + _multi_host_url_regex_cache = re.compile( + rf'{_scheme_regex}{_user_info_regex}' + r'(?P<hosts>([^/]*))' # validation occurs later + rf'{_path_regex}{_query_regex}{_fragment_regex}', + re.IGNORECASE, + ) + return _multi_host_url_regex_cache + + +def ascii_domain_regex() -> Pattern[str]: + global _ascii_domain_regex_cache + if _ascii_domain_regex_cache is None: + ascii_chunk = r'[_0-9a-z](?:[-_0-9a-z]{0,61}[_0-9a-z])?' + ascii_domain_ending = r'(?P<tld>\.[a-z]{2,63})?\.?' + _ascii_domain_regex_cache = re.compile( + fr'(?:{ascii_chunk}\.)*?{ascii_chunk}{ascii_domain_ending}', re.IGNORECASE + ) + return _ascii_domain_regex_cache + + +def int_domain_regex() -> Pattern[str]: + global _int_domain_regex_cache + if _int_domain_regex_cache is None: + int_chunk = r'[_0-9a-\U00040000](?:[-_0-9a-\U00040000]{0,61}[_0-9a-\U00040000])?' + int_domain_ending = r'(?P<tld>(\.[^\W\d_]{2,63})|(\.(?:xn--)[_0-9a-z-]{2,63}))?\.?'
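As a quick illustration of what the named groups in `_host_regex` and the other pieces above capture (assuming pydantic v1.x is importable, as installed by this diff):

from pydantic.networks import url_regex

m = url_regex().match('https://user:secret@example.com:8080/path?q=1#frag')
assert m is not None
parts = m.groupdict()
# scheme/user/password come from _scheme_regex and _user_info_regex,
# domain and port from _host_regex, the rest from the trailing pieces.
print(parts['scheme'], parts['user'], parts['domain'], parts['port'])  # https user example.com 8080
print(parts['path'], parts['query'], parts['fragment'])  # /path q=1 frag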
+ _int_domain_regex_cache = re.compile(fr'(?:{int_chunk}\.)*?{int_chunk}{int_domain_ending}', re.IGNORECASE) + return _int_domain_regex_cache + + +def host_regex() -> Pattern[str]: + global _host_regex_cache + if _host_regex_cache is None: + _host_regex_cache = re.compile( + _host_regex, + re.IGNORECASE, + ) + return _host_regex_cache + + +class AnyUrl(str): + strip_whitespace = True + min_length = 1 + max_length = 2**16 + allowed_schemes: Optional[Collection[str]] = None + tld_required: bool = False + user_required: bool = False + host_required: bool = True + hidden_parts: Set[str] = set() + quote_plus: bool = False + + __slots__ = ('scheme', 'user', 'password', 'host', 'tld', 'host_type', 'port', 'path', 'query', 'fragment') + + @no_type_check + def __new__(cls, url: Optional[str], **kwargs) -> object: + return str.__new__(cls, cls.build(**kwargs) if url is None else url) + + def __init__( + self, + url: str, + *, + scheme: str, + user: Optional[str] = None, + password: Optional[str] = None, + host: Optional[str] = None, + tld: Optional[str] = None, + host_type: str = 'domain', + port: Optional[str] = None, + path: Optional[str] = None, + query: Optional[str] = None, + fragment: Optional[str] = None, + ) -> None: + str.__init__(url) + self.scheme = scheme + self.user = user + self.password = password + self.host = host + self.tld = tld + self.host_type = host_type + self.port = port + self.path = path + self.query = query + self.fragment = fragment + + @classmethod + def build( + cls, + *, + scheme: str, + user: Optional[str] = None, + password: Optional[str] = None, + host: str, + port: Optional[str] = None, + path: Optional[str] = None, + query: Optional[str] = None, + fragment: Optional[str] = None, + **_kwargs: str, + ) -> str: + parts = Parts( + scheme=scheme, + user=user, + password=password, + host=host, + port=port, + path=path, + query=query, + fragment=fragment, + **_kwargs, # type: ignore[misc] + ) + + url = scheme + '://' + if user: + url += cls.quote(user) + if password: + url += ':' + cls.quote(password) + if user or password: + url += '@' + url += host + if port and ('port' not in cls.hidden_parts or cls.get_default_parts(parts).get('port') != port): + url += ':' + port + if path: + url += '/'.join(map(cls.quote, path.split('/'))) + if query: + queries = query.split('&') + url += '?' 
+ '&'.join(map(lambda s: '='.join(map(cls.quote, s.split('='))), queries)) + if fragment: + url += '#' + fragment + return url + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length, format='uri') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, value: Any, field: 'ModelField', config: 'BaseConfig') -> 'AnyUrl': + if value.__class__ == cls: + return value + value = str_validator(value) + if cls.strip_whitespace: + value = value.strip() + url: str = cast(str, constr_length_validator(value, field, config)) + + m = cls._match_url(url) + # the regex should always match, if it doesn't please report with details of the URL tried + assert m, 'URL regex failed unexpectedly' + + original_parts = cast('Parts', m.groupdict()) + parts = cls.apply_default_parts(original_parts) + parts = cls.validate_parts(parts) + + if m.end() != len(url): + raise errors.UrlExtraError(extra=url[m.end() :]) + + return cls._build_url(m, url, parts) + + @classmethod + def _build_url(cls, m: Match[str], url: str, parts: 'Parts') -> 'AnyUrl': + """ + Validate hosts and build the AnyUrl object. Split from `validate` so this method + can be altered in `MultiHostDsn`. + """ + host, tld, host_type, rebuild = cls.validate_host(parts) + + return cls( + None if rebuild else url, + scheme=parts['scheme'], + user=parts['user'], + password=parts['password'], + host=host, + tld=tld, + host_type=host_type, + port=parts['port'], + path=parts['path'], + query=parts['query'], + fragment=parts['fragment'], + ) + + @staticmethod + def _match_url(url: str) -> Optional[Match[str]]: + return url_regex().match(url) + + @staticmethod + def _validate_port(port: Optional[str]) -> None: + if port is not None and int(port) > 65_535: + raise errors.UrlPortError() + + @classmethod + def validate_parts(cls, parts: 'Parts', validate_port: bool = True) -> 'Parts': + """ + A method used to validate parts of a URL. 
+ Could be overridden to set default values for parts if missing + """ + scheme = parts['scheme'] + if scheme is None: + raise errors.UrlSchemeError() + + if cls.allowed_schemes and scheme.lower() not in cls.allowed_schemes: + raise errors.UrlSchemePermittedError(set(cls.allowed_schemes)) + + if validate_port: + cls._validate_port(parts['port']) + + user = parts['user'] + if cls.user_required and user is None: + raise errors.UrlUserInfoError() + + return parts + + @classmethod + def validate_host(cls, parts: 'Parts') -> Tuple[str, Optional[str], str, bool]: + tld, host_type, rebuild = None, None, False + for f in ('domain', 'ipv4', 'ipv6'): + host = parts[f] # type: ignore[literal-required] + if host: + host_type = f + break + + if host is None: + if cls.host_required: + raise errors.UrlHostError() + elif host_type == 'domain': + is_international = False + d = ascii_domain_regex().fullmatch(host) + if d is None: + d = int_domain_regex().fullmatch(host) + if d is None: + raise errors.UrlHostError() + is_international = True + + tld = d.group('tld') + if tld is None and not is_international: + d = int_domain_regex().fullmatch(host) + assert d is not None + tld = d.group('tld') + is_international = True + + if tld is not None: + tld = tld[1:] + elif cls.tld_required: + raise errors.UrlHostTldError() + + if is_international: + host_type = 'int_domain' + rebuild = True + host = host.encode('idna').decode('ascii') + if tld is not None: + tld = tld.encode('idna').decode('ascii') + + return host, tld, host_type, rebuild # type: ignore + + @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + return {} + + @classmethod + def apply_default_parts(cls, parts: 'Parts') -> 'Parts': + for key, value in cls.get_default_parts(parts).items(): + if not parts[key]: # type: ignore[literal-required] + parts[key] = value # type: ignore[literal-required] + return parts + + @classmethod + def quote(cls, string: str, safe: str = '') -> str: + return quote_plus(string, safe) if cls.quote_plus else quote(string, safe) + + def __repr__(self) -> str: + extra = ', '.join(f'{n}={getattr(self, n)!r}' for n in self.__slots__ if getattr(self, n) is not None) + return f'{self.__class__.__name__}({super().__repr__()}, {extra})' + + +class AnyHttpUrl(AnyUrl): + allowed_schemes = {'http', 'https'} + + __slots__ = () + + +class HttpUrl(AnyHttpUrl): + tld_required = True + # https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers + max_length = 2083 + hidden_parts = {'port'} + + @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + return {'port': '80' if parts['scheme'] == 'http' else '443'} + + +class FileUrl(AnyUrl): + allowed_schemes = {'file'} + host_required = False + + __slots__ = () + + +class MultiHostDsn(AnyUrl): + __slots__ = AnyUrl.__slots__ + ('hosts',) + + def __init__(self, *args: Any, hosts: Optional[List['HostParts']] = None, **kwargs: Any): + super().__init__(*args, **kwargs) + self.hosts = hosts + + @staticmethod + def _match_url(url: str) -> Optional[Match[str]]: + return multi_host_url_regex().match(url) + + @classmethod + def validate_parts(cls, parts: 'Parts', validate_port: bool = True) -> 'Parts': + return super().validate_parts(parts, validate_port=False) + + @classmethod + def _build_url(cls, m: Match[str], url: str, parts: 'Parts') -> 'MultiHostDsn': + hosts_parts: List['HostParts'] = [] + host_re = host_regex() + for host in m.groupdict()['hosts'].split(','): + d: Parts = host_re.match(host).groupdict() # type: ignore + host, 
tld, host_type, rebuild = cls.validate_host(d) + port = d.get('port') + cls._validate_port(port) + hosts_parts.append( + { + 'host': host, + 'host_type': host_type, + 'tld': tld, + 'rebuild': rebuild, + 'port': port, + } + ) + + if len(hosts_parts) > 1: + return cls( + None if any([hp['rebuild'] for hp in hosts_parts]) else url, + scheme=parts['scheme'], + user=parts['user'], + password=parts['password'], + path=parts['path'], + query=parts['query'], + fragment=parts['fragment'], + host_type=None, + hosts=hosts_parts, + ) + else: + # backwards compatibility with single host + host_part = hosts_parts[0] + return cls( + None if host_part['rebuild'] else url, + scheme=parts['scheme'], + user=parts['user'], + password=parts['password'], + host=host_part['host'], + tld=host_part['tld'], + host_type=host_part['host_type'], + port=host_part.get('port'), + path=parts['path'], + query=parts['query'], + fragment=parts['fragment'], + ) + + +class PostgresDsn(MultiHostDsn): + allowed_schemes = { + 'postgres', + 'postgresql', + 'postgresql+asyncpg', + 'postgresql+pg8000', + 'postgresql+psycopg2', + 'postgresql+psycopg2cffi', + 'postgresql+py-postgresql', + 'postgresql+pygresql', + } + user_required = True + + __slots__ = () + + +class CockroachDsn(AnyUrl): + allowed_schemes = { + 'cockroachdb', + 'cockroachdb+psycopg2', + 'cockroachdb+asyncpg', + } + user_required = True + + +class AmqpDsn(AnyUrl): + allowed_schemes = {'amqp', 'amqps'} + host_required = False + + +class RedisDsn(AnyUrl): + __slots__ = () + allowed_schemes = {'redis', 'rediss'} + host_required = False + + @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + return { + 'domain': 'localhost' if not (parts['ipv4'] or parts['ipv6']) else '', + 'port': '6379', + 'path': '/0', + } + + +class MongoDsn(AnyUrl): + allowed_schemes = {'mongodb'} + + # TODO: Needed to generic "Parts" for "Replica Set", "Sharded Cluster", and other mongodb deployment modes + @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + return { + 'port': '27017', + } + + +class KafkaDsn(AnyUrl): + allowed_schemes = {'kafka'} + + @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + return { + 'domain': 'localhost', + 'port': '9092', + } + + +def stricturl( + *, + strip_whitespace: bool = True, + min_length: int = 1, + max_length: int = 2**16, + tld_required: bool = True, + host_required: bool = True, + allowed_schemes: Optional[Collection[str]] = None, + quote_plus: bool = False, +) -> Type[AnyUrl]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict( + strip_whitespace=strip_whitespace, + min_length=min_length, + max_length=max_length, + tld_required=tld_required, + host_required=host_required, + allowed_schemes=allowed_schemes, + quote_plus=quote_plus, + ) + return type('UrlValue', (AnyUrl,), namespace) + + +def import_email_validator() -> None: + global email_validator + try: + import email_validator + except ImportError as e: + raise ImportError('email-validator is not installed, run `pip install pydantic[email]`') from e + + +class EmailStr(str): + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='email') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + # included here and below so the error happens straight away + import_email_validator() + + yield str_validator + yield cls.validate + + @classmethod + def validate(cls, value: Union[str]) -> str: + return validate_email(value)[1] + + +class 
NameEmail(Representation): + __slots__ = 'name', 'email' + + def __init__(self, name: str, email: str): + self.name = name + self.email = email + + def __eq__(self, other: Any) -> bool: + return isinstance(other, NameEmail) and (self.name, self.email) == (other.name, other.email) + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='name-email') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + import_email_validator() + + yield cls.validate + + @classmethod + def validate(cls, value: Any) -> 'NameEmail': + if value.__class__ == cls: + return value + value = str_validator(value) + return cls(*validate_email(value)) + + def __str__(self) -> str: + return f'{self.name} <{self.email}>' + + +class IPvAnyAddress(_BaseAddress): + __slots__ = () + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='ipvanyaddress') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, value: Union[str, bytes, int]) -> Union[IPv4Address, IPv6Address]: + try: + return IPv4Address(value) + except ValueError: + pass + + try: + return IPv6Address(value) + except ValueError: + raise errors.IPvAnyAddressError() + + +class IPvAnyInterface(_BaseAddress): + __slots__ = () + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='ipvanyinterface') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, value: NetworkType) -> Union[IPv4Interface, IPv6Interface]: + try: + return IPv4Interface(value) + except ValueError: + pass + + try: + return IPv6Interface(value) + except ValueError: + raise errors.IPvAnyInterfaceError() + + +class IPvAnyNetwork(_BaseNetwork): # type: ignore + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='ipvanynetwork') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, value: NetworkType) -> Union[IPv4Network, IPv6Network]: + # Assume IP Network is defined with a default value for ``strict`` argument. + # Define your own class if you want to specify network address check strictness. + try: + return IPv4Network(value) + except ValueError: + pass + + try: + return IPv6Network(value) + except ValueError: + raise errors.IPvAnyNetworkError() + + +pretty_email_regex = re.compile(r'([\w ]*?) *<(.*)> *') + + +def validate_email(value: Union[str]) -> Tuple[str, str]: + """ + Brutally simple email address validation. Note unlike most email address validation + * raw ip address (literal) domain parts are not allowed. + * "John Doe " style "pretty" email addresses are processed + * the local part check is extremely basic. This raises the possibility of unicode spoofing, but no better + solution is really possible. + * spaces are striped from the beginning and end of addresses but no error is raised + + See RFC 5322 but treat it with suspicion, there seems to exist no universally acknowledged test for a valid email! 
+ """ + if email_validator is None: + import_email_validator() + + m = pretty_email_regex.fullmatch(value) + name: Optional[str] = None + if m: + name, value = m.groups() + + email = value.strip() + + try: + email_validator.validate_email(email, check_deliverability=False) + except email_validator.EmailNotValidError as e: + raise errors.EmailError() from e + + at_index = email.index('@') + local_part = email[:at_index] # RFC 5321, local part must be case-sensitive. + global_part = email[at_index:].lower() + + return name or local_part, local_part + global_part diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/parse.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/parse.cp37-win_amd64.pyd new file mode 100644 index 00000000..2c71c861 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/parse.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/parse.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/parse.py new file mode 100644 index 00000000..7ac330ca --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/parse.py @@ -0,0 +1,66 @@ +import json +import pickle +from enum import Enum +from pathlib import Path +from typing import Any, Callable, Union + +from .types import StrBytes + + +class Protocol(str, Enum): + json = 'json' + pickle = 'pickle' + + +def load_str_bytes( + b: StrBytes, + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, +) -> Any: + if proto is None and content_type: + if content_type.endswith(('json', 'javascript')): + pass + elif allow_pickle and content_type.endswith('pickle'): + proto = Protocol.pickle + else: + raise TypeError(f'Unknown content-type: {content_type}') + + proto = proto or Protocol.json + + if proto == Protocol.json: + if isinstance(b, bytes): + b = b.decode(encoding) + return json_loads(b) + elif proto == Protocol.pickle: + if not allow_pickle: + raise RuntimeError('Trying to decode with pickle with allow_pickle=False') + bb = b if isinstance(b, bytes) else b.encode() + return pickle.loads(bb) + else: + raise TypeError(f'Unknown protocol: {proto}') + + +def load_file( + path: Union[str, Path], + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, +) -> Any: + path = Path(path) + b = path.read_bytes() + if content_type is None: + if path.suffix in ('.js', '.json'): + proto = Protocol.json + elif path.suffix == '.pkl': + proto = Protocol.pickle + + return load_str_bytes( + b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/py.typed b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/schema.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/schema.cp37-win_amd64.pyd new file mode 100644 index 00000000..30974e35 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/schema.cp37-win_amd64.pyd differ diff --git 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/schema.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/schema.py new file mode 100644 index 00000000..e7af56f1 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/schema.py @@ -0,0 +1,1153 @@ +import re +import warnings +from collections import defaultdict +from datetime import date, datetime, time, timedelta +from decimal import Decimal +from enum import Enum +from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + ForwardRef, + FrozenSet, + Generic, + Iterable, + List, + Optional, + Pattern, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) +from uuid import UUID + +from typing_extensions import Annotated, Literal + +from .fields import ( + MAPPING_LIKE_SHAPES, + SHAPE_DEQUE, + SHAPE_FROZENSET, + SHAPE_GENERIC, + SHAPE_ITERABLE, + SHAPE_LIST, + SHAPE_SEQUENCE, + SHAPE_SET, + SHAPE_SINGLETON, + SHAPE_TUPLE, + SHAPE_TUPLE_ELLIPSIS, + FieldInfo, + ModelField, +) +from .json import pydantic_encoder +from .networks import AnyUrl, EmailStr +from .types import ( + ConstrainedDecimal, + ConstrainedFloat, + ConstrainedFrozenSet, + ConstrainedInt, + ConstrainedList, + ConstrainedSet, + SecretBytes, + SecretStr, + StrictBytes, + StrictStr, + conbytes, + condecimal, + confloat, + confrozenset, + conint, + conlist, + conset, + constr, +) +from .typing import ( + all_literal_values, + get_args, + get_origin, + get_sub_types, + is_callable_type, + is_literal_type, + is_namedtuple, + is_none_type, + is_union, +) +from .utils import ROOT_KEY, get_model, lenient_issubclass + +if TYPE_CHECKING: + from .dataclasses import Dataclass + from .main import BaseModel + +default_prefix = '#/definitions/' +default_ref_template = '#/definitions/{model}' + +TypeModelOrEnum = Union[Type['BaseModel'], Type[Enum]] +TypeModelSet = Set[TypeModelOrEnum] + + +def _apply_modify_schema( + modify_schema: Callable[..., None], field: Optional[ModelField], field_schema: Dict[str, Any] +) -> None: + from inspect import signature + + sig = signature(modify_schema) + args = set(sig.parameters.keys()) + if 'field' in args or 'kwargs' in args: + modify_schema(field_schema, field=field) + else: + modify_schema(field_schema) + + +def schema( + models: Sequence[Union[Type['BaseModel'], Type['Dataclass']]], + *, + by_alias: bool = True, + title: Optional[str] = None, + description: Optional[str] = None, + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, +) -> Dict[str, Any]: + """ + Process a list of models and generate a single JSON Schema with all of them defined in the ``definitions`` + top-level JSON key, including their sub-models. + + :param models: a list of models to include in the generated JSON Schema + :param by_alias: generate the schemas using the aliases defined, if any + :param title: title for the generated schema that includes the definitions + :param description: description for the generated schema + :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the + default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere + else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the + top-level key ``definitions``, so you can extract them from there. 
But all the references will have the set + prefix. + :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful + for references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For + a sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. + :return: dict with the JSON Schema with a ``definitions`` top-level key including the schema definitions for + the models and sub-models passed in ``models``. + """ + clean_models = [get_model(model) for model in models] + flat_models = get_flat_models_from_models(clean_models) + model_name_map = get_model_name_map(flat_models) + definitions = {} + output_schema: Dict[str, Any] = {} + if title: + output_schema['title'] = title + if description: + output_schema['description'] = description + for model in clean_models: + m_schema, m_definitions, m_nested_models = model_process_schema( + model, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + ) + definitions.update(m_definitions) + model_name = model_name_map[model] + definitions[model_name] = m_schema + if definitions: + output_schema['definitions'] = definitions + return output_schema + + +def model_schema( + model: Union[Type['BaseModel'], Type['Dataclass']], + by_alias: bool = True, + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, +) -> Dict[str, Any]: + """ + Generate a JSON Schema for one model. With all the sub-models defined in the ``definitions`` top-level + JSON key. + + :param model: a Pydantic model (a class that inherits from BaseModel) + :param by_alias: generate the schemas using the aliases defined, if any + :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the + default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere + else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the + top-level key ``definitions``, so you can extract them from there. But all the references will have the set + prefix. + :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for + references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a + sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. + :return: dict with the JSON Schema for the passed ``model`` + """ + model = get_model(model) + flat_models = get_flat_models_from_model(model) + model_name_map = get_model_name_map(flat_models) + model_name = model_name_map[model] + m_schema, m_definitions, nested_models = model_process_schema( + model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template + ) + if model_name in nested_models: + # model_name is in Nested models, it has circular references + m_definitions[model_name] = m_schema + m_schema = get_schema_ref(model_name, ref_prefix, ref_template, False) + if m_definitions: + m_schema.update({'definitions': m_definitions}) + return m_schema + + +def get_field_info_schema(field: ModelField, schema_overrides: bool = False) -> Tuple[Dict[str, Any], bool]: + + # If no title is explicitly set, we don't set title in the schema for enums. + # The behaviour is the same as `BaseModel` reference, where the default title + # is in the definitions part of the schema. 
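+ # For example (illustrative): a non-enum field aliased 'first_name' with no explicit title
+ # is given the title 'First Name' below, via alias.title().replace('_', ' ').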
+ schema_: Dict[str, Any] = {} + if field.field_info.title or not lenient_issubclass(field.type_, Enum): + schema_['title'] = field.field_info.title or field.alias.title().replace('_', ' ') + + if field.field_info.title: + schema_overrides = True + + if field.field_info.description: + schema_['description'] = field.field_info.description + schema_overrides = True + + if not field.required and field.default is not None and not is_callable_type(field.outer_type_): + schema_['default'] = encode_default(field.default) + schema_overrides = True + + return schema_, schema_overrides + + +def field_schema( + field: ModelField, + *, + by_alias: bool = True, + model_name_map: Dict[TypeModelOrEnum, str], + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, + known_models: TypeModelSet = None, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + Process a Pydantic field and return a tuple with a JSON Schema for it as the first item. + Also return a dictionary of definitions with models as keys and their schemas as values. If the passed field + is a model and has sub-models, and those sub-models don't have overrides (as ``title``, ``default``, etc), they + will be included in the definitions and referenced in the schema instead of included recursively. + + :param field: a Pydantic ``ModelField`` + :param by_alias: use the defined alias (if any) in the returned schema + :param model_name_map: used to generate the JSON Schema references to other models included in the definitions + :param ref_prefix: the JSON Pointer prefix to use for references to other schemas, if None, the default of + #/definitions/ will be used + :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for + references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a + sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. + :param known_models: used to solve circular references + :return: tuple of the schema for this field and additional definitions + """ + s, schema_overrides = get_field_info_schema(field) + + validation_schema = get_field_schema_validations(field) + if validation_schema: + s.update(validation_schema) + schema_overrides = True + + f_schema, f_definitions, f_nested_models = field_type_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models or set(), + ) + + # $ref will only be returned when there are no schema_overrides + if '$ref' in f_schema: + return f_schema, f_definitions, f_nested_models + else: + s.update(f_schema) + return s, f_definitions, f_nested_models + + +numeric_types = (int, float, Decimal) +_str_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = ( + ('max_length', numeric_types, 'maxLength'), + ('min_length', numeric_types, 'minLength'), + ('regex', str, 'pattern'), +) + +_numeric_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = ( + ('gt', numeric_types, 'exclusiveMinimum'), + ('lt', numeric_types, 'exclusiveMaximum'), + ('ge', numeric_types, 'minimum'), + ('le', numeric_types, 'maximum'), + ('multiple_of', numeric_types, 'multipleOf'), +) + + +def get_field_schema_validations(field: ModelField) -> Dict[str, Any]: + """ + Get the JSON Schema validation keywords for a ``field`` with an annotation of + a Pydantic ``FieldInfo`` with validation arguments. 
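+    For example, a ``str`` field declared with ``Field(min_length=2, max_length=10)`` contributes
+    ``{'minLength': 2, 'maxLength': 10}`` to the returned keywords.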
+ """ + f_schema: Dict[str, Any] = {} + + if lenient_issubclass(field.type_, Enum): + # schema is already updated by `enum_process_schema`; just update with field extra + if field.field_info.extra: + f_schema.update(field.field_info.extra) + return f_schema + + if lenient_issubclass(field.type_, (str, bytes)): + for attr_name, t, keyword in _str_types_attrs: + attr = getattr(field.field_info, attr_name, None) + if isinstance(attr, t): + f_schema[keyword] = attr + if lenient_issubclass(field.type_, numeric_types) and not issubclass(field.type_, bool): + for attr_name, t, keyword in _numeric_types_attrs: + attr = getattr(field.field_info, attr_name, None) + if isinstance(attr, t): + f_schema[keyword] = attr + if field.field_info is not None and field.field_info.const: + f_schema['const'] = field.default + if field.field_info.extra: + f_schema.update(field.field_info.extra) + modify_schema = getattr(field.outer_type_, '__modify_schema__', None) + if modify_schema: + _apply_modify_schema(modify_schema, field, f_schema) + return f_schema + + +def get_model_name_map(unique_models: TypeModelSet) -> Dict[TypeModelOrEnum, str]: + """ + Process a set of models and generate unique names for them to be used as keys in the JSON Schema + definitions. By default the names are the same as the class name. But if two models in different Python + modules have the same name (e.g. "users.Model" and "items.Model"), the generated names will be + based on the Python module path for those conflicting models to prevent name collisions. + + :param unique_models: a Python set of models + :return: dict mapping models to names + """ + name_model_map = {} + conflicting_names: Set[str] = set() + for model in unique_models: + model_name = normalize_name(model.__name__) + if model_name in conflicting_names: + model_name = get_long_model_name(model) + name_model_map[model_name] = model + elif model_name in name_model_map: + conflicting_names.add(model_name) + conflicting_model = name_model_map.pop(model_name) + name_model_map[get_long_model_name(conflicting_model)] = conflicting_model + name_model_map[get_long_model_name(model)] = model + else: + name_model_map[model_name] = model + return {v: k for k, v in name_model_map.items()} + + +def get_flat_models_from_model(model: Type['BaseModel'], known_models: TypeModelSet = None) -> TypeModelSet: + """ + Take a single ``model`` and generate a set with itself and all the sub-models in the tree. I.e. if you pass + model ``Foo`` (subclass of Pydantic ``BaseModel``) as ``model``, and it has a field of type ``Bar`` (also + subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``), + the return value will be ``set([Foo, Bar, Baz])``. + + :param model: a Pydantic ``BaseModel`` subclass + :param known_models: used to solve circular references + :return: a set with the initial model and all its sub-models + """ + known_models = known_models or set() + flat_models: TypeModelSet = set() + flat_models.add(model) + known_models |= flat_models + fields = cast(Sequence[ModelField], model.__fields__.values()) + flat_models |= get_flat_models_from_fields(fields, known_models=known_models) + return flat_models + + +def get_flat_models_from_field(field: ModelField, known_models: TypeModelSet) -> TypeModelSet: + """ + Take a single Pydantic ``ModelField`` (from a model) that could have been declared as a sublcass of BaseModel + (so, it could be a submodel), and generate a set with its model and all the sub-models in the tree. + I.e. 
if you pass a field that was declared to be of type ``Foo`` (subclass of BaseModel) as ``field``, and that + model ``Foo`` has a field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of + type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. + + :param field: a Pydantic ``ModelField`` + :param known_models: used to solve circular references + :return: a set with the model used in the declaration for this field, if any, and all its sub-models + """ + from .main import BaseModel + + flat_models: TypeModelSet = set() + + field_type = field.type_ + if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel): + field_type = field_type.__pydantic_model__ + + if field.sub_fields and not lenient_issubclass(field_type, BaseModel): + flat_models |= get_flat_models_from_fields(field.sub_fields, known_models=known_models) + elif lenient_issubclass(field_type, BaseModel) and field_type not in known_models: + flat_models |= get_flat_models_from_model(field_type, known_models=known_models) + elif lenient_issubclass(field_type, Enum): + flat_models.add(field_type) + return flat_models + + +def get_flat_models_from_fields(fields: Sequence[ModelField], known_models: TypeModelSet) -> TypeModelSet: + """ + Take a list of Pydantic ``ModelField``s (from a model) that could have been declared as subclasses of ``BaseModel`` + (so, any of them could be a submodel), and generate a set with their models and all the sub-models in the tree. + I.e. if you pass a the fields of a model ``Foo`` (subclass of ``BaseModel``) as ``fields``, and on of them has a + field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also + subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. + + :param fields: a list of Pydantic ``ModelField``s + :param known_models: used to solve circular references + :return: a set with any model declared in the fields, and all their sub-models + """ + flat_models: TypeModelSet = set() + for field in fields: + flat_models |= get_flat_models_from_field(field, known_models=known_models) + return flat_models + + +def get_flat_models_from_models(models: Sequence[Type['BaseModel']]) -> TypeModelSet: + """ + Take a list of ``models`` and generate a set with them and all their sub-models in their trees. I.e. if you pass + a list of two models, ``Foo`` and ``Bar``, both subclasses of Pydantic ``BaseModel`` as models, and ``Bar`` has + a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``. + """ + flat_models: TypeModelSet = set() + for model in models: + flat_models |= get_flat_models_from_model(model) + return flat_models + + +def get_long_model_name(model: TypeModelOrEnum) -> str: + return f'{model.__module__}__{model.__qualname__}'.replace('.', '__') + + +def field_type_schema( + field: ModelField, + *, + by_alias: bool, + model_name_map: Dict[TypeModelOrEnum, str], + ref_template: str, + schema_overrides: bool = False, + ref_prefix: Optional[str] = None, + known_models: TypeModelSet, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + Used by ``field_schema()``, you probably should be using that function. + + Take a single ``field`` and generate the schema for its type only, not including additional + information as title, etc. Also return additional schema definitions, from sub-models. 
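+    For example, a ``List[int]`` field produces ``{'type': 'array', 'items': {'type': 'integer'}}``.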
+ """ + from .main import BaseModel # noqa: F811 + + definitions = {} + nested_models: Set[str] = set() + f_schema: Dict[str, Any] + if field.shape in { + SHAPE_LIST, + SHAPE_TUPLE_ELLIPSIS, + SHAPE_SEQUENCE, + SHAPE_SET, + SHAPE_FROZENSET, + SHAPE_ITERABLE, + SHAPE_DEQUE, + }: + items_schema, f_definitions, f_nested_models = field_singleton_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(f_definitions) + nested_models.update(f_nested_models) + f_schema = {'type': 'array', 'items': items_schema} + if field.shape in {SHAPE_SET, SHAPE_FROZENSET}: + f_schema['uniqueItems'] = True + + elif field.shape in MAPPING_LIKE_SHAPES: + f_schema = {'type': 'object'} + key_field = cast(ModelField, field.key_field) + regex = getattr(key_field.type_, 'regex', None) + items_schema, f_definitions, f_nested_models = field_singleton_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(f_definitions) + nested_models.update(f_nested_models) + if regex: + # Dict keys have a regex pattern + # items_schema might be a schema or empty dict, add it either way + f_schema['patternProperties'] = {regex.pattern: items_schema} + elif items_schema: + # The dict values are not simply Any, so they need a schema + f_schema['additionalProperties'] = items_schema + elif field.shape == SHAPE_TUPLE or (field.shape == SHAPE_GENERIC and not issubclass(field.type_, BaseModel)): + sub_schema = [] + sub_fields = cast(List[ModelField], field.sub_fields) + for sf in sub_fields: + sf_schema, sf_definitions, sf_nested_models = field_type_schema( + sf, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(sf_definitions) + nested_models.update(sf_nested_models) + sub_schema.append(sf_schema) + + sub_fields_len = len(sub_fields) + if field.shape == SHAPE_GENERIC: + all_of_schemas = sub_schema[0] if sub_fields_len == 1 else {'type': 'array', 'items': sub_schema} + f_schema = {'allOf': [all_of_schemas]} + else: + f_schema = { + 'type': 'array', + 'minItems': sub_fields_len, + 'maxItems': sub_fields_len, + } + if sub_fields_len >= 1: + f_schema['items'] = sub_schema + else: + assert field.shape in {SHAPE_SINGLETON, SHAPE_GENERIC}, field.shape + f_schema, f_definitions, f_nested_models = field_singleton_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(f_definitions) + nested_models.update(f_nested_models) + + # check field type to avoid repeated calls to the same __modify_schema__ method + if field.type_ != field.outer_type_: + if field.shape == SHAPE_GENERIC: + field_type = field.type_ + else: + field_type = field.outer_type_ + modify_schema = getattr(field_type, '__modify_schema__', None) + if modify_schema: + _apply_modify_schema(modify_schema, field, f_schema) + return f_schema, definitions, nested_models + + +def model_process_schema( + model: TypeModelOrEnum, + *, + by_alias: bool = True, + model_name_map: Dict[TypeModelOrEnum, str], + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, + known_models: TypeModelSet = None, + field: Optional[ModelField] = None, +) -> Tuple[Dict[str, Any], 
Dict[str, Any], Set[str]]: + """ + Used by ``model_schema()``, you probably should be using that function. + + Take a single ``model`` and generate its schema. Also return additional schema definitions, from sub-models. The + sub-models of the returned schema will be referenced, but their definitions will not be included in the schema. All + the definitions are returned as the second value. + """ + from inspect import getdoc, signature + + known_models = known_models or set() + if lenient_issubclass(model, Enum): + model = cast(Type[Enum], model) + s = enum_process_schema(model, field=field) + return s, {}, set() + model = cast(Type['BaseModel'], model) + s = {'title': model.__config__.title or model.__name__} + doc = getdoc(model) + if doc: + s['description'] = doc + known_models.add(model) + m_schema, m_definitions, nested_models = model_type_schema( + model, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + s.update(m_schema) + schema_extra = model.__config__.schema_extra + if callable(schema_extra): + if len(signature(schema_extra).parameters) == 1: + schema_extra(s) + else: + schema_extra(s, model) + else: + s.update(schema_extra) + return s, m_definitions, nested_models + + +def model_type_schema( + model: Type['BaseModel'], + *, + by_alias: bool, + model_name_map: Dict[TypeModelOrEnum, str], + ref_template: str, + ref_prefix: Optional[str] = None, + known_models: TypeModelSet, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + You probably should be using ``model_schema()``, this function is indirectly used by that function. + + Take a single ``model`` and generate the schema for its type only, not including additional + information as title, etc. Also return additional schema definitions, from sub-models. + """ + properties = {} + required = [] + definitions: Dict[str, Any] = {} + nested_models: Set[str] = set() + for k, f in model.__fields__.items(): + try: + f_schema, f_definitions, f_nested_models = field_schema( + f, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + except SkipField as skip: + warnings.warn(skip.message, UserWarning) + continue + definitions.update(f_definitions) + nested_models.update(f_nested_models) + if by_alias: + properties[f.alias] = f_schema + if f.required: + required.append(f.alias) + else: + properties[k] = f_schema + if f.required: + required.append(k) + if ROOT_KEY in properties: + out_schema = properties[ROOT_KEY] + out_schema['title'] = model.__config__.title or model.__name__ + else: + out_schema = {'type': 'object', 'properties': properties} + if required: + out_schema['required'] = required + if model.__config__.extra == 'forbid': + out_schema['additionalProperties'] = False + return out_schema, definitions, nested_models + + +def enum_process_schema(enum: Type[Enum], *, field: Optional[ModelField] = None) -> Dict[str, Any]: + """ + Take a single `enum` and generate its schema. + + This is similar to the `model_process_schema` function, but applies to ``Enum`` objects. + """ + schema_: Dict[str, Any] = { + 'title': enum.__name__, + # Python assigns all enums a default docstring value of 'An enumeration', so + # all enums will have a description field even if not explicitly provided. + 'description': enum.__doc__ or 'An enumeration.', + # Add enum values and the enum field type to the schema. 
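+ # For example (illustrative): class Color(Enum): RED = 'red'; BLUE = 'blue' produces
+ # {'title': 'Color', 'description': 'An enumeration.', 'enum': ['red', 'blue']}.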
+ 'enum': [item.value for item in cast(Iterable[Enum], enum)], + } + + add_field_type_to_schema(enum, schema_) + + modify_schema = getattr(enum, '__modify_schema__', None) + if modify_schema: + _apply_modify_schema(modify_schema, field, schema_) + + return schema_ + + +def field_singleton_sub_fields_schema( + field: ModelField, + *, + by_alias: bool, + model_name_map: Dict[TypeModelOrEnum, str], + ref_template: str, + schema_overrides: bool = False, + ref_prefix: Optional[str] = None, + known_models: TypeModelSet, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + This function is indirectly used by ``field_schema()``, you probably should be using that function. + + Take a list of Pydantic ``ModelField`` from the declaration of a type with parameters, and generate their + schema. I.e., fields used as "type parameters", like ``str`` and ``int`` in ``Tuple[str, int]``. + """ + sub_fields = cast(List[ModelField], field.sub_fields) + definitions = {} + nested_models: Set[str] = set() + if len(sub_fields) == 1: + return field_type_schema( + sub_fields[0], + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + else: + s: Dict[str, Any] = {} + # https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#discriminator-object + field_has_discriminator: bool = field.discriminator_key is not None + if field_has_discriminator: + assert field.sub_fields_mapping is not None + + discriminator_models_refs: Dict[str, Union[str, Dict[str, Any]]] = {} + + for discriminator_value, sub_field in field.sub_fields_mapping.items(): + # sub_field is either a `BaseModel` or directly an `Annotated` `Union` of many + if is_union(get_origin(sub_field.type_)): + sub_models = get_sub_types(sub_field.type_) + discriminator_models_refs[discriminator_value] = { + model_name_map[sub_model]: get_schema_ref( + model_name_map[sub_model], ref_prefix, ref_template, False + ) + for sub_model in sub_models + } + else: + sub_field_type = sub_field.type_ + if hasattr(sub_field_type, '__pydantic_model__'): + sub_field_type = sub_field_type.__pydantic_model__ + + discriminator_model_name = model_name_map[sub_field_type] + discriminator_model_ref = get_schema_ref(discriminator_model_name, ref_prefix, ref_template, False) + discriminator_models_refs[discriminator_value] = discriminator_model_ref['$ref'] + + s['discriminator'] = { + 'propertyName': field.discriminator_alias, + 'mapping': discriminator_models_refs, + } + + sub_field_schemas = [] + for sf in sub_fields: + sub_schema, sub_definitions, sub_nested_models = field_type_schema( + sf, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(sub_definitions) + if schema_overrides and 'allOf' in sub_schema: + # if the sub_field is a referenced schema we only need the referenced + # object. Otherwise we will end up with several allOf inside anyOf/oneOf. 
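+ # e.g. a referenced sub-schema {'allOf': [{'$ref': '#/definitions/Foo'}]} is collapsed
+ # here to just {'$ref': '#/definitions/Foo'}.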
+ # See https://github.com/pydantic/pydantic/issues/1209 + sub_schema = sub_schema['allOf'][0] + + if sub_schema.keys() == {'discriminator', 'oneOf'}: + # we don't want discriminator information inside oneOf choices, this is dealt with elsewhere + sub_schema.pop('discriminator') + sub_field_schemas.append(sub_schema) + nested_models.update(sub_nested_models) + s['oneOf' if field_has_discriminator else 'anyOf'] = sub_field_schemas + return s, definitions, nested_models + + +# Order is important, e.g. subclasses of str must go before str +# this is used only for standard library types, custom types should use __modify_schema__ instead +field_class_to_schema: Tuple[Tuple[Any, Dict[str, Any]], ...] = ( + (Path, {'type': 'string', 'format': 'path'}), + (datetime, {'type': 'string', 'format': 'date-time'}), + (date, {'type': 'string', 'format': 'date'}), + (time, {'type': 'string', 'format': 'time'}), + (timedelta, {'type': 'number', 'format': 'time-delta'}), + (IPv4Network, {'type': 'string', 'format': 'ipv4network'}), + (IPv6Network, {'type': 'string', 'format': 'ipv6network'}), + (IPv4Interface, {'type': 'string', 'format': 'ipv4interface'}), + (IPv6Interface, {'type': 'string', 'format': 'ipv6interface'}), + (IPv4Address, {'type': 'string', 'format': 'ipv4'}), + (IPv6Address, {'type': 'string', 'format': 'ipv6'}), + (Pattern, {'type': 'string', 'format': 'regex'}), + (str, {'type': 'string'}), + (bytes, {'type': 'string', 'format': 'binary'}), + (bool, {'type': 'boolean'}), + (int, {'type': 'integer'}), + (float, {'type': 'number'}), + (Decimal, {'type': 'number'}), + (UUID, {'type': 'string', 'format': 'uuid'}), + (dict, {'type': 'object'}), + (list, {'type': 'array', 'items': {}}), + (tuple, {'type': 'array', 'items': {}}), + (set, {'type': 'array', 'items': {}, 'uniqueItems': True}), + (frozenset, {'type': 'array', 'items': {}, 'uniqueItems': True}), +) + +json_scheme = {'type': 'string', 'format': 'json-string'} + + +def add_field_type_to_schema(field_type: Any, schema_: Dict[str, Any]) -> None: + """ + Update the given `schema` with the type-specific metadata for the given `field_type`. + + This function looks through `field_class_to_schema` for a class that matches the given `field_type`, + and then modifies the given `schema` with the information from that type. + """ + for type_, t_schema in field_class_to_schema: + # Fallback for `typing.Pattern` and `re.Pattern` as they are not a valid class + if lenient_issubclass(field_type, type_) or field_type is type_ is Pattern: + schema_.update(t_schema) + break + + +def get_schema_ref(name: str, ref_prefix: Optional[str], ref_template: str, schema_overrides: bool) -> Dict[str, Any]: + if ref_prefix: + schema_ref = {'$ref': ref_prefix + name} + else: + schema_ref = {'$ref': ref_template.format(model=name)} + return {'allOf': [schema_ref]} if schema_overrides else schema_ref + + +def field_singleton_schema( # noqa: C901 (ignore complexity) + field: ModelField, + *, + by_alias: bool, + model_name_map: Dict[TypeModelOrEnum, str], + ref_template: str, + schema_overrides: bool = False, + ref_prefix: Optional[str] = None, + known_models: TypeModelSet, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + This function is indirectly used by ``field_schema()``, you should probably be using that function. + + Take a single Pydantic ``ModelField``, and return its schema and any additional definitions from sub-models. 
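+    For example, a plain ``int`` field returns ``{'type': 'integer'}``, while a sub-model field returns a
+    ``$ref`` schema plus an entry for the sub-model in the returned definitions.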
+ """ + from .main import BaseModel + + definitions: Dict[str, Any] = {} + nested_models: Set[str] = set() + field_type = field.type_ + + # Recurse into this field if it contains sub_fields and is NOT a + # BaseModel OR that BaseModel is a const + if field.sub_fields and ( + (field.field_info and field.field_info.const) or not lenient_issubclass(field_type, BaseModel) + ): + return field_singleton_sub_fields_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + if field_type is Any or field_type is object or field_type.__class__ == TypeVar or get_origin(field_type) is type: + return {}, definitions, nested_models # no restrictions + if is_none_type(field_type): + return {'type': 'null'}, definitions, nested_models + if is_callable_type(field_type): + raise SkipField(f'Callable {field.name} was excluded from schema since JSON schema has no equivalent type.') + f_schema: Dict[str, Any] = {} + if field.field_info is not None and field.field_info.const: + f_schema['const'] = field.default + + if is_literal_type(field_type): + values = all_literal_values(field_type) + + if len({v.__class__ for v in values}) > 1: + return field_schema( + multitypes_literal_field_for_schema(values, field), + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + + # All values have the same type + field_type = values[0].__class__ + f_schema['enum'] = list(values) + add_field_type_to_schema(field_type, f_schema) + elif lenient_issubclass(field_type, Enum): + enum_name = model_name_map[field_type] + f_schema, schema_overrides = get_field_info_schema(field, schema_overrides) + f_schema.update(get_schema_ref(enum_name, ref_prefix, ref_template, schema_overrides)) + definitions[enum_name] = enum_process_schema(field_type, field=field) + elif is_namedtuple(field_type): + sub_schema, *_ = model_process_schema( + field_type.__pydantic_model__, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + field=field, + ) + items_schemas = list(sub_schema['properties'].values()) + f_schema.update( + { + 'type': 'array', + 'items': items_schemas, + 'minItems': len(items_schemas), + 'maxItems': len(items_schemas), + } + ) + elif not hasattr(field_type, '__pydantic_model__'): + add_field_type_to_schema(field_type, f_schema) + + modify_schema = getattr(field_type, '__modify_schema__', None) + if modify_schema: + _apply_modify_schema(modify_schema, field, f_schema) + + if f_schema: + return f_schema, definitions, nested_models + + # Handle dataclass-based models + if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel): + field_type = field_type.__pydantic_model__ + + if issubclass(field_type, BaseModel): + model_name = model_name_map[field_type] + if field_type not in known_models: + sub_schema, sub_definitions, sub_nested_models = model_process_schema( + field_type, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + field=field, + ) + definitions.update(sub_definitions) + definitions[model_name] = sub_schema + nested_models.update(sub_nested_models) + else: + nested_models.add(model_name) + schema_ref = get_schema_ref(model_name, ref_prefix, ref_template, schema_overrides) + return schema_ref, definitions, 
nested_models + + # For generics with no args + args = get_args(field_type) + if args is not None and not args and Generic in field_type.__bases__: + return f_schema, definitions, nested_models + + raise ValueError(f'Value not declarable with JSON Schema, field: {field}') + + +def multitypes_literal_field_for_schema(values: Tuple[Any, ...], field: ModelField) -> ModelField: + """ + To support `Literal` with values of different types, we split it into multiple `Literal` with same type + e.g. `Literal['qwe', 'asd', 1, 2]` becomes `Union[Literal['qwe', 'asd'], Literal[1, 2]]` + """ + literal_distinct_types = defaultdict(list) + for v in values: + literal_distinct_types[v.__class__].append(v) + distinct_literals = (Literal[tuple(same_type_values)] for same_type_values in literal_distinct_types.values()) + + return ModelField( + name=field.name, + type_=Union[tuple(distinct_literals)], # type: ignore + class_validators=field.class_validators, + model_config=field.model_config, + default=field.default, + required=field.required, + alias=field.alias, + field_info=field.field_info, + ) + + +def encode_default(dft: Any) -> Any: + if isinstance(dft, Enum): + return dft.value + elif isinstance(dft, (int, float, str)): + return dft + elif isinstance(dft, (list, tuple)): + t = dft.__class__ + seq_args = (encode_default(v) for v in dft) + return t(*seq_args) if is_namedtuple(t) else t(seq_args) + elif isinstance(dft, dict): + return {encode_default(k): encode_default(v) for k, v in dft.items()} + elif dft is None: + return None + else: + return pydantic_encoder(dft) + + +_map_types_constraint: Dict[Any, Callable[..., type]] = {int: conint, float: confloat, Decimal: condecimal} + + +def get_annotation_from_field_info( + annotation: Any, field_info: FieldInfo, field_name: str, validate_assignment: bool = False +) -> Type[Any]: + """ + Get an annotation with validation implemented for numbers and strings based on the field_info. + :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr`` + :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema + :param field_name: name of the field for use in error messages + :param validate_assignment: default False, flag for BaseModel Config value of validate_assignment + :return: the same ``annotation`` if unmodified or a new annotation with validation in place + """ + constraints = field_info.get_constraints() + used_constraints: Set[str] = set() + if constraints: + annotation, used_constraints = get_annotation_with_constraints(annotation, field_info) + if validate_assignment: + used_constraints.add('allow_mutation') + + unused_constraints = constraints - used_constraints + if unused_constraints: + raise ValueError( + f'On field "{field_name}" the following field constraints are set but not enforced: ' + f'{", ".join(unused_constraints)}. ' + f'\nFor more details see https://pydantic-docs.helpmanual.io/usage/schema/#unenforced-field-constraints' + ) + + return annotation + + +def get_annotation_with_constraints(annotation: Any, field_info: FieldInfo) -> Tuple[Type[Any], Set[str]]: # noqa: C901 + """ + Get an annotation with used constraints implemented for numbers and strings based on the field_info. 
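+    For example, an ``int`` annotation whose ``field_info`` carries ``gt=0`` comes back as a
+    ``conint``-style subclass enforcing ``gt=0``.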
+ + :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr`` + :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema + :return: the same ``annotation`` if unmodified or a new annotation along with the used constraints. + """ + used_constraints: Set[str] = set() + + def go(type_: Any) -> Type[Any]: + if ( + is_literal_type(type_) + or isinstance(type_, ForwardRef) + or lenient_issubclass(type_, (ConstrainedList, ConstrainedSet, ConstrainedFrozenSet)) + ): + return type_ + origin = get_origin(type_) + if origin is not None: + args: Tuple[Any, ...] = get_args(type_) + if any(isinstance(a, ForwardRef) for a in args): + # forward refs cause infinite recursion below + return type_ + + if origin is Annotated: + return go(args[0]) + if is_union(origin): + return Union[tuple(go(a) for a in args)] # type: ignore + + if issubclass(origin, List) and ( + field_info.min_items is not None + or field_info.max_items is not None + or field_info.unique_items is not None + ): + used_constraints.update({'min_items', 'max_items', 'unique_items'}) + return conlist( + go(args[0]), + min_items=field_info.min_items, + max_items=field_info.max_items, + unique_items=field_info.unique_items, + ) + + if issubclass(origin, Set) and (field_info.min_items is not None or field_info.max_items is not None): + used_constraints.update({'min_items', 'max_items'}) + return conset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) + + if issubclass(origin, FrozenSet) and (field_info.min_items is not None or field_info.max_items is not None): + used_constraints.update({'min_items', 'max_items'}) + return confrozenset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) + + for t in (Tuple, List, Set, FrozenSet, Sequence): + if issubclass(origin, t): # type: ignore + return t[tuple(go(a) for a in args)] # type: ignore + + if issubclass(origin, Dict): + return Dict[args[0], go(args[1])] # type: ignore + + attrs: Optional[Tuple[str, ...]] = None + constraint_func: Optional[Callable[..., type]] = None + if isinstance(type_, type): + if issubclass(type_, (SecretStr, SecretBytes)): + attrs = ('max_length', 'min_length') + + def constraint_func(**kw: Any) -> Type[Any]: + return type(type_.__name__, (type_,), kw) + + elif issubclass(type_, str) and not issubclass(type_, (EmailStr, AnyUrl)): + attrs = ('max_length', 'min_length', 'regex') + if issubclass(type_, StrictStr): + + def constraint_func(**kw: Any) -> Type[Any]: + return type(type_.__name__, (type_,), kw) + + else: + constraint_func = constr + elif issubclass(type_, bytes): + attrs = ('max_length', 'min_length', 'regex') + if issubclass(type_, StrictBytes): + + def constraint_func(**kw: Any) -> Type[Any]: + return type(type_.__name__, (type_,), kw) + + else: + constraint_func = conbytes + elif issubclass(type_, numeric_types) and not issubclass( + type_, + ( + ConstrainedInt, + ConstrainedFloat, + ConstrainedDecimal, + ConstrainedList, + ConstrainedSet, + ConstrainedFrozenSet, + bool, + ), + ): + # Is numeric type + attrs = ('gt', 'lt', 'ge', 'le', 'multiple_of') + if issubclass(type_, float): + attrs += ('allow_inf_nan',) + if issubclass(type_, Decimal): + attrs += ('max_digits', 'decimal_places') + numeric_type = next(t for t in numeric_types if issubclass(type_, t)) # pragma: no branch + constraint_func = _map_types_constraint[numeric_type] + + if attrs: + used_constraints.update(set(attrs)) + kwargs = { + attr_name: attr + for attr_name, attr 
in ((attr_name, getattr(field_info, attr_name)) for attr_name in attrs) + if attr is not None + } + if kwargs: + constraint_func = cast(Callable[..., type], constraint_func) + return constraint_func(**kwargs) + return type_ + + return go(annotation), used_constraints + + +def normalize_name(name: str) -> str: + """ + Normalizes the given name. This can be applied to either a model *or* enum. + """ + return re.sub(r'[^a-zA-Z0-9.\-_]', '_', name) + + +class SkipField(Exception): + """ + Utility exception used to exclude fields from schema. + """ + + def __init__(self, message: str) -> None: + self.message = message diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/tools.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/tools.cp37-win_amd64.pyd new file mode 100644 index 00000000..7400818b Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/tools.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/tools.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/tools.py new file mode 100644 index 00000000..9cdb4538 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/tools.py @@ -0,0 +1,92 @@ +import json +from functools import lru_cache +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, Optional, Type, TypeVar, Union + +from .parse import Protocol, load_file, load_str_bytes +from .types import StrBytes +from .typing import display_as_type + +__all__ = ('parse_file_as', 'parse_obj_as', 'parse_raw_as', 'schema_of', 'schema_json_of') + +NameFactory = Union[str, Callable[[Type[Any]], str]] + +if TYPE_CHECKING: + from .typing import DictStrAny + + +def _generate_parsing_type_name(type_: Any) -> str: + return f'ParsingModel[{display_as_type(type_)}]' + + +@lru_cache(maxsize=2048) +def _get_parsing_type(type_: Any, *, type_name: Optional[NameFactory] = None) -> Any: + from pydantic.main import create_model + + if type_name is None: + type_name = _generate_parsing_type_name + if not isinstance(type_name, str): + type_name = type_name(type_) + return create_model(type_name, __root__=(type_, ...)) + + +T = TypeVar('T') + + +def parse_obj_as(type_: Type[T], obj: Any, *, type_name: Optional[NameFactory] = None) -> T: + model_type = _get_parsing_type(type_, type_name=type_name) # type: ignore[arg-type] + return model_type(__root__=obj).__root__ + + +def parse_file_as( + type_: Type[T], + path: Union[str, Path], + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, + type_name: Optional[NameFactory] = None, +) -> T: + obj = load_file( + path, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + json_loads=json_loads, + ) + return parse_obj_as(type_, obj, type_name=type_name) + + +def parse_raw_as( + type_: Type[T], + b: StrBytes, + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, + type_name: Optional[NameFactory] = None, +) -> T: + obj = load_str_bytes( + b, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + json_loads=json_loads, + ) + return parse_obj_as(type_, obj, type_name=type_name) + + +def schema_of(type_: Any, *, title: Optional[NameFactory] = None, 
**schema_kwargs: Any) -> 'DictStrAny': + """Generate a JSON schema (as dict) for the passed model or dynamically generated one""" + return _get_parsing_type(type_, type_name=title).schema(**schema_kwargs) + + +def schema_json_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_json_kwargs: Any) -> str: + """Generate a JSON schema (as JSON) for the passed model or dynamically generated one""" + return _get_parsing_type(type_, type_name=title).schema_json(**schema_json_kwargs) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/types.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/types.cp37-win_amd64.pyd new file mode 100644 index 00000000..86514402 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/types.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/types.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/types.py new file mode 100644 index 00000000..f98dba3d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/types.py @@ -0,0 +1,1187 @@ +import abc +import math +import re +import warnings +from datetime import date +from decimal import Decimal +from enum import Enum +from pathlib import Path +from types import new_class +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ClassVar, + Dict, + FrozenSet, + List, + Optional, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, + overload, +) +from uuid import UUID +from weakref import WeakSet + +from . import errors +from .datetime_parse import parse_date +from .utils import import_string, update_not_none +from .validators import ( + bytes_validator, + constr_length_validator, + constr_lower, + constr_strip_whitespace, + constr_upper, + decimal_validator, + float_finite_validator, + float_validator, + frozenset_validator, + int_validator, + list_validator, + number_multiple_validator, + number_size_validator, + path_exists_validator, + path_validator, + set_validator, + str_validator, + strict_bytes_validator, + strict_float_validator, + strict_int_validator, + strict_str_validator, +) + +__all__ = [ + 'NoneStr', + 'NoneBytes', + 'StrBytes', + 'NoneStrBytes', + 'StrictStr', + 'ConstrainedBytes', + 'conbytes', + 'ConstrainedList', + 'conlist', + 'ConstrainedSet', + 'conset', + 'ConstrainedFrozenSet', + 'confrozenset', + 'ConstrainedStr', + 'constr', + 'PyObject', + 'ConstrainedInt', + 'conint', + 'PositiveInt', + 'NegativeInt', + 'NonNegativeInt', + 'NonPositiveInt', + 'ConstrainedFloat', + 'confloat', + 'PositiveFloat', + 'NegativeFloat', + 'NonNegativeFloat', + 'NonPositiveFloat', + 'FiniteFloat', + 'ConstrainedDecimal', + 'condecimal', + 'UUID1', + 'UUID3', + 'UUID4', + 'UUID5', + 'FilePath', + 'DirectoryPath', + 'Json', + 'JsonWrapper', + 'SecretField', + 'SecretStr', + 'SecretBytes', + 'StrictBool', + 'StrictBytes', + 'StrictInt', + 'StrictFloat', + 'PaymentCardNumber', + 'ByteSize', + 'PastDate', + 'FutureDate', + 'ConstrainedDate', + 'condate', +] + +NoneStr = Optional[str] +NoneBytes = Optional[bytes] +StrBytes = Union[str, bytes] +NoneStrBytes = Optional[StrBytes] +OptionalInt = Optional[int] +OptionalIntFloat = Union[OptionalInt, float] +OptionalIntFloatDecimal = Union[OptionalIntFloat, Decimal] +OptionalDate = Optional[date] +StrIntFloat = Union[str, int, float] + +if TYPE_CHECKING: + from typing_extensions import Annotated + + from .dataclasses import Dataclass + from .main 
import BaseModel + from .typing import CallableGenerator + + ModelOrDc = Type[Union[BaseModel, Dataclass]] + +T = TypeVar('T') +_DEFINED_TYPES: 'WeakSet[type]' = WeakSet() + + +@overload +def _registered(typ: Type[T]) -> Type[T]: + pass + + +@overload +def _registered(typ: 'ConstrainedNumberMeta') -> 'ConstrainedNumberMeta': + pass + + +def _registered(typ: Union[Type[T], 'ConstrainedNumberMeta']) -> Union[Type[T], 'ConstrainedNumberMeta']: + # In order to generate valid examples of constrained types, Hypothesis needs + # to inspect the type object - so we keep a weakref to each contype object + # until it can be registered. When (or if) our Hypothesis plugin is loaded, + # it monkeypatches this function. + # If Hypothesis is never used, the total effect is to keep a weak reference + # which has minimal memory usage and doesn't even affect garbage collection. + _DEFINED_TYPES.add(typ) + return typ + + +class ConstrainedNumberMeta(type): + def __new__(cls, name: str, bases: Any, dct: Dict[str, Any]) -> 'ConstrainedInt': # type: ignore + new_cls = cast('ConstrainedInt', type.__new__(cls, name, bases, dct)) + + if new_cls.gt is not None and new_cls.ge is not None: + raise errors.ConfigError('bounds gt and ge cannot be specified at the same time') + if new_cls.lt is not None and new_cls.le is not None: + raise errors.ConfigError('bounds lt and le cannot be specified at the same time') + + return _registered(new_cls) # type: ignore + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BOOLEAN TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +if TYPE_CHECKING: + StrictBool = bool +else: + + class StrictBool(int): + """ + StrictBool to allow for bools which are not type-coerced. + """ + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='boolean') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, value: Any) -> bool: + """ + Ensure that we only allow bools. 
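+        For example, ``True`` is returned unchanged, while ``1`` or ``'true'`` raise ``StrictBoolError``
+        instead of being coerced.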
+ """ + if isinstance(value, bool): + return value + + raise errors.StrictBoolError() + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INTEGER TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class ConstrainedInt(int, metaclass=ConstrainedNumberMeta): + strict: bool = False + gt: OptionalInt = None + ge: OptionalInt = None + lt: OptionalInt = None + le: OptionalInt = None + multiple_of: OptionalInt = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + exclusiveMinimum=cls.gt, + exclusiveMaximum=cls.lt, + minimum=cls.ge, + maximum=cls.le, + multipleOf=cls.multiple_of, + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield strict_int_validator if cls.strict else int_validator + yield number_size_validator + yield number_multiple_validator + + +def conint( + *, strict: bool = False, gt: int = None, ge: int = None, lt: int = None, le: int = None, multiple_of: int = None +) -> Type[int]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of) + return type('ConstrainedIntValue', (ConstrainedInt,), namespace) + + +if TYPE_CHECKING: + PositiveInt = int + NegativeInt = int + NonPositiveInt = int + NonNegativeInt = int + StrictInt = int +else: + + class PositiveInt(ConstrainedInt): + gt = 0 + + class NegativeInt(ConstrainedInt): + lt = 0 + + class NonPositiveInt(ConstrainedInt): + le = 0 + + class NonNegativeInt(ConstrainedInt): + ge = 0 + + class StrictInt(ConstrainedInt): + strict = True + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FLOAT TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class ConstrainedFloat(float, metaclass=ConstrainedNumberMeta): + strict: bool = False + gt: OptionalIntFloat = None + ge: OptionalIntFloat = None + lt: OptionalIntFloat = None + le: OptionalIntFloat = None + multiple_of: OptionalIntFloat = None + allow_inf_nan: Optional[bool] = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + exclusiveMinimum=cls.gt, + exclusiveMaximum=cls.lt, + minimum=cls.ge, + maximum=cls.le, + multipleOf=cls.multiple_of, + ) + # Modify constraints to account for differences between IEEE floats and JSON + if field_schema.get('exclusiveMinimum') == -math.inf: + del field_schema['exclusiveMinimum'] + if field_schema.get('minimum') == -math.inf: + del field_schema['minimum'] + if field_schema.get('exclusiveMaximum') == math.inf: + del field_schema['exclusiveMaximum'] + if field_schema.get('maximum') == math.inf: + del field_schema['maximum'] + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield strict_float_validator if cls.strict else float_validator + yield number_size_validator + yield number_multiple_validator + yield float_finite_validator + + +def confloat( + *, + strict: bool = False, + gt: float = None, + ge: float = None, + lt: float = None, + le: float = None, + multiple_of: float = None, + allow_inf_nan: Optional[bool] = None, +) -> Type[float]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of, allow_inf_nan=allow_inf_nan) + return type('ConstrainedFloatValue', (ConstrainedFloat,), namespace) + + +if TYPE_CHECKING: + PositiveFloat = float + NegativeFloat = float + NonPositiveFloat = float + NonNegativeFloat = float + StrictFloat = float + FiniteFloat = float +else: + + class 
PositiveFloat(ConstrainedFloat): + gt = 0 + + class NegativeFloat(ConstrainedFloat): + lt = 0 + + class NonPositiveFloat(ConstrainedFloat): + le = 0 + + class NonNegativeFloat(ConstrainedFloat): + ge = 0 + + class StrictFloat(ConstrainedFloat): + strict = True + + class FiniteFloat(ConstrainedFloat): + allow_inf_nan = False + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTES TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class ConstrainedBytes(bytes): + strip_whitespace = False + to_upper = False + to_lower = False + min_length: OptionalInt = None + max_length: OptionalInt = None + strict: bool = False + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield strict_bytes_validator if cls.strict else bytes_validator + yield constr_strip_whitespace + yield constr_upper + yield constr_lower + yield constr_length_validator + + +def conbytes( + *, + strip_whitespace: bool = False, + to_upper: bool = False, + to_lower: bool = False, + min_length: int = None, + max_length: int = None, + strict: bool = False, +) -> Type[bytes]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict( + strip_whitespace=strip_whitespace, + to_upper=to_upper, + to_lower=to_lower, + min_length=min_length, + max_length=max_length, + strict=strict, + ) + return _registered(type('ConstrainedBytesValue', (ConstrainedBytes,), namespace)) + + +if TYPE_CHECKING: + StrictBytes = bytes +else: + + class StrictBytes(ConstrainedBytes): + strict = True + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ STRING TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class ConstrainedStr(str): + strip_whitespace = False + to_upper = False + to_lower = False + min_length: OptionalInt = None + max_length: OptionalInt = None + curtail_length: OptionalInt = None + regex: Optional[Pattern[str]] = None + strict = False + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + minLength=cls.min_length, + maxLength=cls.max_length, + pattern=cls.regex and cls.regex.pattern, + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield strict_str_validator if cls.strict else str_validator + yield constr_strip_whitespace + yield constr_upper + yield constr_lower + yield constr_length_validator + yield cls.validate + + @classmethod + def validate(cls, value: Union[str]) -> Union[str]: + if cls.curtail_length and len(value) > cls.curtail_length: + value = value[: cls.curtail_length] + + if cls.regex: + if not cls.regex.match(value): + raise errors.StrRegexError(pattern=cls.regex.pattern) + + return value + + +def constr( + *, + strip_whitespace: bool = False, + to_upper: bool = False, + to_lower: bool = False, + strict: bool = False, + min_length: int = None, + max_length: int = None, + curtail_length: int = None, + regex: str = None, +) -> Type[str]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict( + strip_whitespace=strip_whitespace, + to_upper=to_upper, + to_lower=to_lower, + strict=strict, + min_length=min_length, + max_length=max_length, + curtail_length=curtail_length, + regex=regex and re.compile(regex), + ) + return _registered(type('ConstrainedStrValue', (ConstrainedStr,), namespace)) + + +if TYPE_CHECKING: + StrictStr = str +else: + + class StrictStr(ConstrainedStr): + strict = True + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 
SET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+# This type's superclass should be Set[T], but cython chokes on that...
+class ConstrainedSet(set):  # type: ignore
+    # Needed for pydantic to detect that this is a set
+    __origin__ = set
+    __args__: Set[Type[T]]  # type: ignore
+
+    min_items: Optional[int] = None
+    max_items: Optional[int] = None
+    item_type: Type[T]  # type: ignore
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls.set_length_validator
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items)
+
+    @classmethod
+    def set_length_validator(cls, v: 'Optional[Set[T]]') -> 'Optional[Set[T]]':
+        if v is None:
+            return None
+
+        v = set_validator(v)
+        v_len = len(v)
+
+        if cls.min_items is not None and v_len < cls.min_items:
+            raise errors.SetMinLengthError(limit_value=cls.min_items)
+
+        if cls.max_items is not None and v_len > cls.max_items:
+            raise errors.SetMaxLengthError(limit_value=cls.max_items)
+
+        return v
+
+
+def conset(item_type: Type[T], *, min_items: int = None, max_items: int = None) -> Type[Set[T]]:
+    # __args__ is needed to conform to typing generics api
+    namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]}
+    # We use new_class to be able to deal with Generic types
+    return new_class('ConstrainedSetValue', (ConstrainedSet,), {}, lambda ns: ns.update(namespace))
+
+
+# This type's superclass should be FrozenSet[T], but cython chokes on that...
+class ConstrainedFrozenSet(frozenset):  # type: ignore
+    # Needed for pydantic to detect that this is a set
+    __origin__ = frozenset
+    __args__: FrozenSet[Type[T]]  # type: ignore
+
+    min_items: Optional[int] = None
+    max_items: Optional[int] = None
+    item_type: Type[T]  # type: ignore
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls.frozenset_length_validator
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items)
+
+    @classmethod
+    def frozenset_length_validator(cls, v: 'Optional[FrozenSet[T]]') -> 'Optional[FrozenSet[T]]':
+        if v is None:
+            return None
+
+        v = frozenset_validator(v)
+        v_len = len(v)
+
+        if cls.min_items is not None and v_len < cls.min_items:
+            raise errors.FrozenSetMinLengthError(limit_value=cls.min_items)
+
+        if cls.max_items is not None and v_len > cls.max_items:
+            raise errors.FrozenSetMaxLengthError(limit_value=cls.max_items)
+
+        return v
+
+
+def confrozenset(item_type: Type[T], *, min_items: int = None, max_items: int = None) -> Type[FrozenSet[T]]:
+    # __args__ is needed to conform to typing generics api
+    namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]}
+    # We use new_class to be able to deal with Generic types
+    return new_class('ConstrainedFrozenSetValue', (ConstrainedFrozenSet,), {}, lambda ns: ns.update(namespace))
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ LIST TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+# This type's superclass should be List[T], but cython chokes on that...
+class ConstrainedList(list):  # type: ignore
+    # Needed for pydantic to detect that this is a list
+    __origin__ = list
+    __args__: Tuple[Type[T], ...]  # type: ignore
+
+    min_items: Optional[int] = None
+    max_items: Optional[int] = None
+    unique_items: Optional[bool] = None
+    item_type: Type[T]  # type: ignore
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield cls.list_length_validator
+        if cls.unique_items:
+            yield cls.unique_items_validator
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items, uniqueItems=cls.unique_items)
+
+    @classmethod
+    def list_length_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]':
+        if v is None:
+            return None
+
+        v = list_validator(v)
+        v_len = len(v)
+
+        if cls.min_items is not None and v_len < cls.min_items:
+            raise errors.ListMinLengthError(limit_value=cls.min_items)
+
+        if cls.max_items is not None and v_len > cls.max_items:
+            raise errors.ListMaxLengthError(limit_value=cls.max_items)
+
+        return v
+
+    @classmethod
+    def unique_items_validator(cls, v: 'List[T]') -> 'List[T]':
+        # enumerate starts at 1 so each value is compared only against the items that follow it
+        for i, value in enumerate(v, start=1):
+            if value in v[i:]:
+                raise errors.ListUniqueItemsError()
+
+        return v
+
+
+def conlist(
+    item_type: Type[T], *, min_items: int = None, max_items: int = None, unique_items: bool = None
+) -> Type[List[T]]:
+    # __args__ is needed to conform to typing generics api
+    namespace = dict(
+        min_items=min_items, max_items=max_items, unique_items=unique_items, item_type=item_type, __args__=(item_type,)
+    )
+    # We use new_class to be able to deal with Generic types
+    return new_class('ConstrainedListValue', (ConstrainedList,), {}, lambda ns: ns.update(namespace))
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PYOBJECT TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+if TYPE_CHECKING:
+    PyObject = Callable[..., Any]
+else:
+
+    class PyObject:
+        validate_always = True
+
+        @classmethod
+        def __get_validators__(cls) -> 'CallableGenerator':
+            yield cls.validate
+
+        @classmethod
+        def validate(cls, value: Any) -> Any:
+            if isinstance(value, Callable):
+                return value
+
+            try:
+                value = str_validator(value)
+            except errors.StrError:
+                raise errors.PyObjectError(error_message='value is neither a valid import path nor a valid callable')
+
+            try:
+                return import_string(value)
+            except ImportError as e:
+                raise errors.PyObjectError(error_message=str(e))
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECIMAL TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+class ConstrainedDecimal(Decimal, metaclass=ConstrainedNumberMeta):
+    gt: OptionalIntFloatDecimal = None
+    ge: OptionalIntFloatDecimal = None
+    lt: OptionalIntFloatDecimal = None
+    le: OptionalIntFloatDecimal = None
+    max_digits: OptionalInt = None
+    decimal_places: OptionalInt = None
+    multiple_of: OptionalIntFloatDecimal = None
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            exclusiveMinimum=cls.gt,
+            exclusiveMaximum=cls.lt,
+            minimum=cls.ge,
+            maximum=cls.le,
+            multipleOf=cls.multiple_of,
+        )
+
+    @classmethod
+    def __get_validators__(cls) -> 'CallableGenerator':
+        yield decimal_validator
+        yield number_size_validator
+        yield number_multiple_validator
+        yield cls.validate
+
+    @classmethod
+    def validate(cls, value: Decimal) -> Decimal:
+        digit_tuple, exponent = value.as_tuple()[1:]
+        if exponent in {'F', 'n', 'N'}:
+            raise errors.DecimalIsNotFiniteError()
+
+        if exponent >= 0:
+            # A positive exponent adds that many trailing zeros.
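+            # e.g. Decimal('1.23E+3').as_tuple() is (0, (1, 2, 3), 1),
+            # so digits = 3 + 1 = 4 and decimals = 0 (the value is 1230).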
+            digits = len(digit_tuple) + exponent
+            decimals = 0
+        else:
+            # If the absolute value of the negative exponent is larger than the
+            # number of digits, then it's the same as the number of digits,
+            # because it'll consume all of the digits in digit_tuple and then
+            # add abs(exponent) - len(digit_tuple) leading zeros after the
+            # decimal point.
+            if abs(exponent) > len(digit_tuple):
+                digits = decimals = abs(exponent)
+            else:
+                digits = len(digit_tuple)
+                decimals = abs(exponent)
+        whole_digits = digits - decimals
+
+        if cls.max_digits is not None and digits > cls.max_digits:
+            raise errors.DecimalMaxDigitsError(max_digits=cls.max_digits)
+
+        if cls.decimal_places is not None and decimals > cls.decimal_places:
+            raise errors.DecimalMaxPlacesError(decimal_places=cls.decimal_places)
+
+        if cls.max_digits is not None and cls.decimal_places is not None:
+            expected = cls.max_digits - cls.decimal_places
+            if whole_digits > expected:
+                raise errors.DecimalWholeDigitsError(whole_digits=expected)
+
+        return value
+
+
+def condecimal(
+    *,
+    gt: Decimal = None,
+    ge: Decimal = None,
+    lt: Decimal = None,
+    le: Decimal = None,
+    max_digits: int = None,
+    decimal_places: int = None,
+    multiple_of: Decimal = None,
+) -> Type[Decimal]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(
+        gt=gt, ge=ge, lt=lt, le=le, max_digits=max_digits, decimal_places=decimal_places, multiple_of=multiple_of
+    )
+    return type('ConstrainedDecimalValue', (ConstrainedDecimal,), namespace)
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ UUID TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+if TYPE_CHECKING:
+    UUID1 = UUID
+    UUID3 = UUID
+    UUID4 = UUID
+    UUID5 = UUID
+else:
+
+    class UUID1(UUID):
+        _required_version = 1
+
+        @classmethod
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+            field_schema.update(type='string', format=f'uuid{cls._required_version}')
+
+    class UUID3(UUID1):
+        _required_version = 3
+
+    class UUID4(UUID1):
+        _required_version = 4
+
+    class UUID5(UUID1):
+        _required_version = 5
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PATH TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+if TYPE_CHECKING:
+    FilePath = Path
+    DirectoryPath = Path
+else:
+
+    class FilePath(Path):
+        @classmethod
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+            field_schema.update(format='file-path')
+
+        @classmethod
+        def __get_validators__(cls) -> 'CallableGenerator':
+            yield path_validator
+            yield path_exists_validator
+            yield cls.validate
+
+        @classmethod
+        def validate(cls, value: Path) -> Path:
+            if not value.is_file():
+                raise errors.PathNotAFileError(path=value)
+
+            return value
+
+    class DirectoryPath(Path):
+        @classmethod
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+            field_schema.update(format='directory-path')
+
+        @classmethod
+        def __get_validators__(cls) -> 'CallableGenerator':
+            yield path_validator
+            yield path_exists_validator
+            yield cls.validate
+
+        @classmethod
+        def validate(cls, value: Path) -> Path:
+            if not value.is_dir():
+                raise errors.PathNotADirectoryError(path=value)
+
+            return value
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ JSON TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+class JsonWrapper:
+    pass
+
+
+class JsonMeta(type):
+    def __getitem__(self, t: Type[Any]) -> Type[JsonWrapper]:
+        if t is Any:
+            return Json  # allow Json[Any] to replicate plain Json
+        return _registered(type('JsonWrapperValue', (JsonWrapper,), {'inner_type': t}))
+
+
+if TYPE_CHECKING:
+    Json = Annotated[T, ...]
# Json[list[str]] will be recognized by type checkers as list[str] + +else: + + class Json(metaclass=JsonMeta): + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='json-string') + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SECRET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class SecretField(abc.ABC): + """ + Note: this should be implemented as a generic like `SecretField(ABC, Generic[T])`, + the `__init__()` should be part of the abstract class and the + `get_secret_value()` method should use the generic `T` type. + + However Cython doesn't support very well generics at the moment and + the generated code fails to be imported (see + https://github.com/cython/cython/issues/2753). + """ + + def __eq__(self, other: Any) -> bool: + return isinstance(other, self.__class__) and self.get_secret_value() == other.get_secret_value() + + def __str__(self) -> str: + return '**********' if self.get_secret_value() else '' + + def __hash__(self) -> int: + return hash(self.get_secret_value()) + + @abc.abstractmethod + def get_secret_value(self) -> Any: # pragma: no cover + ... + + +class SecretStr(SecretField): + min_length: OptionalInt = None + max_length: OptionalInt = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + type='string', + writeOnly=True, + format='password', + minLength=cls.min_length, + maxLength=cls.max_length, + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + yield constr_length_validator + + @classmethod + def validate(cls, value: Any) -> 'SecretStr': + if isinstance(value, cls): + return value + value = str_validator(value) + return cls(value) + + def __init__(self, value: str): + self._secret_value = value + + def __repr__(self) -> str: + return f"SecretStr('{self}')" + + def __len__(self) -> int: + return len(self._secret_value) + + def display(self) -> str: + warnings.warn('`secret_str.display()` is deprecated, use `str(secret_str)` instead', DeprecationWarning) + return str(self) + + def get_secret_value(self) -> str: + return self._secret_value + + +class SecretBytes(SecretField): + min_length: OptionalInt = None + max_length: OptionalInt = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + type='string', + writeOnly=True, + format='password', + minLength=cls.min_length, + maxLength=cls.max_length, + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + yield constr_length_validator + + @classmethod + def validate(cls, value: Any) -> 'SecretBytes': + if isinstance(value, cls): + return value + value = bytes_validator(value) + return cls(value) + + def __init__(self, value: bytes): + self._secret_value = value + + def __repr__(self) -> str: + return f"SecretBytes(b'{self}')" + + def __len__(self) -> int: + return len(self._secret_value) + + def display(self) -> str: + warnings.warn('`secret_bytes.display()` is deprecated, use `str(secret_bytes)` instead', DeprecationWarning) + return str(self) + + def get_secret_value(self) -> bytes: + return self._secret_value + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PAYMENT CARD TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class PaymentCardBrand(str, Enum): + # If you add another card type, please also add it to the + # Hypothesis strategy in `pydantic._hypothesis_plugin`. 
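+    # Because the enum also subclasses str, members compare equal to their
+    # display values, e.g. PaymentCardBrand.visa == 'Visa'.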
+ amex = 'American Express' + mastercard = 'Mastercard' + visa = 'Visa' + other = 'other' + + def __str__(self) -> str: + return self.value + + +class PaymentCardNumber(str): + """ + Based on: https://en.wikipedia.org/wiki/Payment_card_number + """ + + strip_whitespace: ClassVar[bool] = True + min_length: ClassVar[int] = 12 + max_length: ClassVar[int] = 19 + bin: str + last4: str + brand: PaymentCardBrand + + def __init__(self, card_number: str): + self.bin = card_number[:6] + self.last4 = card_number[-4:] + self.brand = self._get_brand(card_number) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield str_validator + yield constr_strip_whitespace + yield constr_length_validator + yield cls.validate_digits + yield cls.validate_luhn_check_digit + yield cls + yield cls.validate_length_for_brand + + @property + def masked(self) -> str: + num_masked = len(self) - 10 # len(bin) + len(last4) == 10 + return f'{self.bin}{"*" * num_masked}{self.last4}' + + @classmethod + def validate_digits(cls, card_number: str) -> str: + if not card_number.isdigit(): + raise errors.NotDigitError + return card_number + + @classmethod + def validate_luhn_check_digit(cls, card_number: str) -> str: + """ + Based on: https://en.wikipedia.org/wiki/Luhn_algorithm + """ + sum_ = int(card_number[-1]) + length = len(card_number) + parity = length % 2 + for i in range(length - 1): + digit = int(card_number[i]) + if i % 2 == parity: + digit *= 2 + if digit > 9: + digit -= 9 + sum_ += digit + valid = sum_ % 10 == 0 + if not valid: + raise errors.LuhnValidationError + return card_number + + @classmethod + def validate_length_for_brand(cls, card_number: 'PaymentCardNumber') -> 'PaymentCardNumber': + """ + Validate length based on BIN for major brands: + https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN) + """ + required_length: Union[None, int, str] = None + if card_number.brand in PaymentCardBrand.mastercard: + required_length = 16 + valid = len(card_number) == required_length + elif card_number.brand == PaymentCardBrand.visa: + required_length = '13, 16 or 19' + valid = len(card_number) in {13, 16, 19} + elif card_number.brand == PaymentCardBrand.amex: + required_length = 15 + valid = len(card_number) == required_length + else: + valid = True + if not valid: + raise errors.InvalidLengthForBrand(brand=card_number.brand, required_length=required_length) + return card_number + + @staticmethod + def _get_brand(card_number: str) -> PaymentCardBrand: + if card_number[0] == '4': + brand = PaymentCardBrand.visa + elif 51 <= int(card_number[:2]) <= 55: + brand = PaymentCardBrand.mastercard + elif card_number[:2] in {'34', '37'}: + brand = PaymentCardBrand.amex + else: + brand = PaymentCardBrand.other + return brand + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTE SIZE TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +BYTE_SIZES = { + 'b': 1, + 'kb': 10**3, + 'mb': 10**6, + 'gb': 10**9, + 'tb': 10**12, + 'pb': 10**15, + 'eb': 10**18, + 'kib': 2**10, + 'mib': 2**20, + 'gib': 2**30, + 'tib': 2**40, + 'pib': 2**50, + 'eib': 2**60, +} +BYTE_SIZES.update({k.lower()[0]: v for k, v in BYTE_SIZES.items() if 'i' not in k}) +byte_string_re = re.compile(r'^\s*(\d*\.?\d+)\s*(\w+)?', re.IGNORECASE) + + +class ByteSize(int): + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, v: StrIntFloat) -> 'ByteSize': + + try: + return cls(int(v)) + except ValueError: + pass + + str_match = byte_string_re.match(str(v)) + if str_match is None: + 
raise errors.InvalidByteSize() + + scalar, unit = str_match.groups() + if unit is None: + unit = 'b' + + try: + unit_mult = BYTE_SIZES[unit.lower()] + except KeyError: + raise errors.InvalidByteSizeUnit(unit=unit) + + return cls(int(float(scalar) * unit_mult)) + + def human_readable(self, decimal: bool = False) -> str: + + if decimal: + divisor = 1000 + units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] + final_unit = 'EB' + else: + divisor = 1024 + units = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB'] + final_unit = 'EiB' + + num = float(self) + for unit in units: + if abs(num) < divisor: + return f'{num:0.1f}{unit}' + num /= divisor + + return f'{num:0.1f}{final_unit}' + + def to(self, unit: str) -> float: + + try: + unit_div = BYTE_SIZES[unit.lower()] + except KeyError: + raise errors.InvalidByteSizeUnit(unit=unit) + + return self / unit_div + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATE TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +if TYPE_CHECKING: + PastDate = date + FutureDate = date +else: + + class PastDate(date): + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield parse_date + yield cls.validate + + @classmethod + def validate(cls, value: date) -> date: + if value >= date.today(): + raise errors.DateNotInThePastError() + + return value + + class FutureDate(date): + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield parse_date + yield cls.validate + + @classmethod + def validate(cls, value: date) -> date: + if value <= date.today(): + raise errors.DateNotInTheFutureError() + + return value + + +class ConstrainedDate(date, metaclass=ConstrainedNumberMeta): + gt: OptionalDate = None + ge: OptionalDate = None + lt: OptionalDate = None + le: OptionalDate = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, exclusiveMinimum=cls.gt, exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield parse_date + yield number_size_validator + + +def condate( + *, + gt: date = None, + ge: date = None, + lt: date = None, + le: date = None, +) -> Type[date]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict(gt=gt, ge=ge, lt=lt, le=le) + return type('ConstrainedDateValue', (ConstrainedDate,), namespace) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/typing.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/typing.cp37-win_amd64.pyd new file mode 100644 index 00000000..0e7e03ad Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/typing.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/typing.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/typing.py new file mode 100644 index 00000000..5ccf266c --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/typing.py @@ -0,0 +1,602 @@ +import sys +from collections.abc import Callable +from os import PathLike +from typing import ( # type: ignore + TYPE_CHECKING, + AbstractSet, + Any, + Callable as TypingCallable, + ClassVar, + Dict, + ForwardRef, + Generator, + Iterable, + List, + Mapping, + NewType, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + _eval_type, + cast, + get_type_hints, +) + +from typing_extensions import ( + Annotated, + Final, + Literal, + NotRequired as TypedDictNotRequired, + Required 
as TypedDictRequired, +) + +try: + from typing import _TypingBase as typing_base # type: ignore +except ImportError: + from typing import _Final as typing_base # type: ignore + +try: + from typing import GenericAlias as TypingGenericAlias # type: ignore +except ImportError: + # python < 3.9 does not have GenericAlias (list[int], tuple[str, ...] and so on) + TypingGenericAlias = () + +try: + from types import UnionType as TypesUnionType # type: ignore +except ImportError: + # python < 3.10 does not have UnionType (str | int, byte | bool and so on) + TypesUnionType = () + + +if sys.version_info < (3, 9): + + def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: + return type_._evaluate(globalns, localns) + +else: + + def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: + # Even though it is the right signature for python 3.9, mypy complains with + # `error: Too many arguments for "_evaluate" of "ForwardRef"` hence the cast... + return cast(Any, type_)._evaluate(globalns, localns, set()) + + +if sys.version_info < (3, 9): + # Ensure we always get all the whole `Annotated` hint, not just the annotated type. + # For 3.7 to 3.8, `get_type_hints` doesn't recognize `typing_extensions.Annotated`, + # so it already returns the full annotation + get_all_type_hints = get_type_hints + +else: + + def get_all_type_hints(obj: Any, globalns: Any = None, localns: Any = None) -> Any: + return get_type_hints(obj, globalns, localns, include_extras=True) + + +_T = TypeVar('_T') + +AnyCallable = TypingCallable[..., Any] +NoArgAnyCallable = TypingCallable[[], Any] + +# workaround for https://github.com/python/mypy/issues/9496 +AnyArgTCallable = TypingCallable[..., _T] + + +# Annotated[...] is implemented by returning an instance of one of these classes, depending on +# python/typing_extensions version. +AnnotatedTypeNames = {'AnnotatedMeta', '_AnnotatedAlias'} + + +if sys.version_info < (3, 8): + + def get_origin(t: Type[Any]) -> Optional[Type[Any]]: + if type(t).__name__ in AnnotatedTypeNames: + # weirdly this is a runtime requirement, as well as for mypy + return cast(Type[Any], Annotated) + return getattr(t, '__origin__', None) + +else: + from typing import get_origin as _typing_get_origin + + def get_origin(tp: Type[Any]) -> Optional[Type[Any]]: + """ + We can't directly use `typing.get_origin` since we need a fallback to support + custom generic classes like `ConstrainedList` + It should be useless once https://github.com/cython/cython/issues/3537 is + solved and https://github.com/pydantic/pydantic/pull/1753 is merged. + """ + if type(tp).__name__ in AnnotatedTypeNames: + return cast(Type[Any], Annotated) # mypy complains about _SpecialForm + return _typing_get_origin(tp) or getattr(tp, '__origin__', None) + + +if sys.version_info < (3, 8): + from typing import _GenericAlias + + def get_args(t: Type[Any]) -> Tuple[Any, ...]: + """Compatibility version of get_args for python 3.7. + + Mostly compatible with the python 3.8 `typing` module version + and able to handle almost all use cases. + """ + if type(t).__name__ in AnnotatedTypeNames: + return t.__args__ + t.__metadata__ + if isinstance(t, _GenericAlias): + res = t.__args__ + if t.__origin__ is Callable and res and res[0] is not Ellipsis: + res = (list(res[:-1]), res[-1]) + return res + return getattr(t, '__args__', ()) + +else: + from typing import get_args as _typing_get_args + + def _generic_get_args(tp: Type[Any]) -> Tuple[Any, ...]: + """ + In python 3.9, `typing.Dict`, `typing.List`, ... 
+        do have an empty `__args__` by default (instead of the generic ~T for example).
+        In order to still support `Dict` for example and consider it as `Dict[Any, Any]`,
+        we retrieve the `_nparams` value that tells us how many parameters it needs.
+        """
+        if hasattr(tp, '_nparams'):
+            return (Any,) * tp._nparams
+        # Special case for `tuple[()]`, which used to return ((),) with `typing.Tuple`
+        # in python 3.10- but now returns () for `tuple` and `Tuple`.
+        # This will probably be clarified in pydantic v2
+        try:
+            if tp == Tuple[()] or sys.version_info >= (3, 9) and tp == tuple[()]:  # type: ignore[misc]
+                return ((),)
+        # there is a TypeError when compiled with cython
+        except TypeError:  # pragma: no cover
+            pass
+        return ()
+
+    def get_args(tp: Type[Any]) -> Tuple[Any, ...]:
+        """Get type arguments with all substitutions performed.
+
+        For unions, basic simplifications used by Union constructor are performed.
+        Examples::
+            get_args(Dict[str, int]) == (str, int)
+            get_args(int) == ()
+            get_args(Union[int, Union[T, int], str][int]) == (int, str)
+            get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
+            get_args(Callable[[], T][int]) == ([], int)
+        """
+        if type(tp).__name__ in AnnotatedTypeNames:
+            return tp.__args__ + tp.__metadata__
+        # the fallback is needed for the same reasons as `get_origin` (see above)
+        return _typing_get_args(tp) or getattr(tp, '__args__', ()) or _generic_get_args(tp)
+
+
+if sys.version_info < (3, 9):
+
+    def convert_generics(tp: Type[Any]) -> Type[Any]:
+        """Python 3.8 and older only support generics from the `typing` module;
+        those convert strings to ForwardRef automatically, so there is nothing to do.
+
+        Examples::
+            typing.List['Hero'] == typing.List[ForwardRef('Hero')]
+        """
+        return tp
+
+else:
+    from typing import _UnionGenericAlias  # type: ignore
+
+    from typing_extensions import _AnnotatedAlias
+
+    def convert_generics(tp: Type[Any]) -> Type[Any]:
+        """
+        Recursively searches for `str` type hints and replaces them with ForwardRef.
+ + Examples:: + convert_generics(list['Hero']) == list[ForwardRef('Hero')] + convert_generics(dict['Hero', 'Team']) == dict[ForwardRef('Hero'), ForwardRef('Team')] + convert_generics(typing.Dict['Hero', 'Team']) == typing.Dict[ForwardRef('Hero'), ForwardRef('Team')] + convert_generics(list[str | 'Hero'] | int) == list[str | ForwardRef('Hero')] | int + """ + origin = get_origin(tp) + if not origin or not hasattr(tp, '__args__'): + return tp + + args = get_args(tp) + + # typing.Annotated needs special treatment + if origin is Annotated: + return _AnnotatedAlias(convert_generics(args[0]), args[1:]) + + # recursively replace `str` instances inside of `GenericAlias` with `ForwardRef(arg)` + converted = tuple( + ForwardRef(arg) if isinstance(arg, str) and isinstance(tp, TypingGenericAlias) else convert_generics(arg) + for arg in args + ) + + if converted == args: + return tp + elif isinstance(tp, TypingGenericAlias): + return TypingGenericAlias(origin, converted) + elif isinstance(tp, TypesUnionType): + # recreate types.UnionType (PEP604, Python >= 3.10) + return _UnionGenericAlias(origin, converted) + else: + try: + setattr(tp, '__args__', converted) + except AttributeError: + pass + return tp + + +if sys.version_info < (3, 10): + + def is_union(tp: Optional[Type[Any]]) -> bool: + return tp is Union + + WithArgsTypes = (TypingGenericAlias,) + +else: + import types + import typing + + def is_union(tp: Optional[Type[Any]]) -> bool: + return tp is Union or tp is types.UnionType # noqa: E721 + + WithArgsTypes = (typing._GenericAlias, types.GenericAlias, types.UnionType) + + +if sys.version_info < (3, 9): + StrPath = Union[str, PathLike] +else: + StrPath = Union[str, PathLike] + # TODO: Once we switch to Cython 3 to handle generics properly + # (https://github.com/cython/cython/issues/2753), use following lines instead + # of the one above + # # os.PathLike only becomes subscriptable from Python 3.9 onwards + # StrPath = Union[str, PathLike[str]] + + +if TYPE_CHECKING: + from .fields import ModelField + + TupleGenerator = Generator[Tuple[str, Any], None, None] + DictStrAny = Dict[str, Any] + DictAny = Dict[Any, Any] + SetStr = Set[str] + ListStr = List[str] + IntStr = Union[int, str] + AbstractSetIntStr = AbstractSet[IntStr] + DictIntStrAny = Dict[IntStr, Any] + MappingIntStrAny = Mapping[IntStr, Any] + CallableGenerator = Generator[AnyCallable, None, None] + ReprArgs = Sequence[Tuple[Optional[str], Any]] + AnyClassMethod = classmethod[Any] + +__all__ = ( + 'AnyCallable', + 'NoArgAnyCallable', + 'NoneType', + 'is_none_type', + 'display_as_type', + 'resolve_annotations', + 'is_callable_type', + 'is_literal_type', + 'all_literal_values', + 'is_namedtuple', + 'is_typeddict', + 'is_typeddict_special', + 'is_new_type', + 'new_type_supertype', + 'is_classvar', + 'is_finalvar', + 'update_field_forward_refs', + 'update_model_forward_refs', + 'TupleGenerator', + 'DictStrAny', + 'DictAny', + 'SetStr', + 'ListStr', + 'IntStr', + 'AbstractSetIntStr', + 'DictIntStrAny', + 'CallableGenerator', + 'ReprArgs', + 'AnyClassMethod', + 'CallableGenerator', + 'WithArgsTypes', + 'get_args', + 'get_origin', + 'get_sub_types', + 'typing_base', + 'get_all_type_hints', + 'is_union', + 'StrPath', + 'MappingIntStrAny', +) + + +NoneType = None.__class__ + + +NONE_TYPES: Tuple[Any, Any, Any] = (None, NoneType, Literal[None]) + + +if sys.version_info < (3, 8): + # Even though this implementation is slower, we need it for python 3.7: + # In python 3.7 "Literal" is not a builtin type and uses a different + # mechanism. 
+    # for this reason `Literal[None] is Literal[None]` evaluates to `False`,
+    # breaking the faster implementation used for the other python versions.
+
+    def is_none_type(type_: Any) -> bool:
+        return type_ in NONE_TYPES
+
+elif sys.version_info[:2] == (3, 8):
+
+    def is_none_type(type_: Any) -> bool:
+        for none_type in NONE_TYPES:
+            if type_ is none_type:
+                return True
+        # With python 3.8, specifically 3.8.10, Literal "is" checks are very flaky:
+        # they can change on very subtle changes like use of types in other modules,
+        # hopefully this check avoids that issue.
+        if is_literal_type(type_):  # pragma: no cover
+            return all_literal_values(type_) == (None,)
+        return False
+
+else:
+
+    def is_none_type(type_: Any) -> bool:
+        for none_type in NONE_TYPES:
+            if type_ is none_type:
+                return True
+        return False
+
+
+def display_as_type(v: Type[Any]) -> str:
+    if not isinstance(v, typing_base) and not isinstance(v, WithArgsTypes) and not isinstance(v, type):
+        v = v.__class__
+
+    if is_union(get_origin(v)):
+        return f'Union[{", ".join(map(display_as_type, get_args(v)))}]'
+
+    if isinstance(v, WithArgsTypes):
+        # Generic aliases are constructs like `list[int]`
+        return str(v).replace('typing.', '')
+
+    try:
+        return v.__name__
+    except AttributeError:
+        # happens with typing objects
+        return str(v).replace('typing.', '')
+
+
+def resolve_annotations(raw_annotations: Dict[str, Type[Any]], module_name: Optional[str]) -> Dict[str, Type[Any]]:
+    """
+    Partially taken from typing.get_type_hints.
+
+    Resolve string or ForwardRef annotations into type objects if possible.
+    """
+    base_globals: Optional[Dict[str, Any]] = None
+    if module_name:
+        try:
+            module = sys.modules[module_name]
+        except KeyError:
+            # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363
+            pass
+        else:
+            base_globals = module.__dict__
+
+    annotations = {}
+    for name, value in raw_annotations.items():
+        if isinstance(value, str):
+            if (3, 10) > sys.version_info >= (3, 9, 8) or sys.version_info >= (3, 10, 1):
+                value = ForwardRef(value, is_argument=False, is_class=True)
+            else:
+                value = ForwardRef(value, is_argument=False)
+        try:
+            value = _eval_type(value, base_globals, None)
+        except NameError:
+            # this is ok, it can be fixed with update_forward_refs
+            pass
+        annotations[name] = value
+    return annotations
+
+
+def is_callable_type(type_: Type[Any]) -> bool:
+    return type_ is Callable or get_origin(type_) is Callable
+
+
+def is_literal_type(type_: Type[Any]) -> bool:
+    return Literal is not None and get_origin(type_) is Literal
+
+
+def literal_values(type_: Type[Any]) -> Tuple[Any, ...]:
+    return get_args(type_)
+
+
+def all_literal_values(type_: Type[Any]) -> Tuple[Any, ...]:
+    """
+    This method is used to retrieve all Literal values as
+    Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586)
+    e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]`
+    """
+    if not is_literal_type(type_):
+        return (type_,)
+
+    values = literal_values(type_)
+    return tuple(x for value in values for x in all_literal_values(value))
+
+
+def is_namedtuple(type_: Type[Any]) -> bool:
+    """
+    Check if a given class is a named tuple.
+ It can be either a `typing.NamedTuple` or `collections.namedtuple` + """ + from .utils import lenient_issubclass + + return lenient_issubclass(type_, tuple) and hasattr(type_, '_fields') + + +def is_typeddict(type_: Type[Any]) -> bool: + """ + Check if a given class is a typed dict (from `typing` or `typing_extensions`) + In 3.10, there will be a public method (https://docs.python.org/3.10/library/typing.html#typing.is_typeddict) + """ + from .utils import lenient_issubclass + + return lenient_issubclass(type_, dict) and hasattr(type_, '__total__') + + +def _check_typeddict_special(type_: Any) -> bool: + return type_ is TypedDictRequired or type_ is TypedDictNotRequired + + +def is_typeddict_special(type_: Any) -> bool: + """ + Check if type is a TypedDict special form (Required or NotRequired). + """ + return _check_typeddict_special(type_) or _check_typeddict_special(get_origin(type_)) + + +test_type = NewType('test_type', str) + + +def is_new_type(type_: Type[Any]) -> bool: + """ + Check whether type_ was created using typing.NewType + """ + return isinstance(type_, test_type.__class__) and hasattr(type_, '__supertype__') # type: ignore + + +def new_type_supertype(type_: Type[Any]) -> Type[Any]: + while hasattr(type_, '__supertype__'): + type_ = type_.__supertype__ + return type_ + + +def _check_classvar(v: Optional[Type[Any]]) -> bool: + if v is None: + return False + + return v.__class__ == ClassVar.__class__ and getattr(v, '_name', None) == 'ClassVar' + + +def _check_finalvar(v: Optional[Type[Any]]) -> bool: + """ + Check if a given type is a `typing.Final` type. + """ + if v is None: + return False + + return v.__class__ == Final.__class__ and (sys.version_info < (3, 8) or getattr(v, '_name', None) == 'Final') + + +def is_classvar(ann_type: Type[Any]) -> bool: + if _check_classvar(ann_type) or _check_classvar(get_origin(ann_type)): + return True + + # this is an ugly workaround for class vars that contain forward references and are therefore themselves + # forward references, see #3679 + if ann_type.__class__ == ForwardRef and ann_type.__forward_arg__.startswith('ClassVar['): + return True + + return False + + +def is_finalvar(ann_type: Type[Any]) -> bool: + return _check_finalvar(ann_type) or _check_finalvar(get_origin(ann_type)) + + +def update_field_forward_refs(field: 'ModelField', globalns: Any, localns: Any) -> None: + """ + Try to update ForwardRefs on fields based on this ModelField, globalns and localns. + """ + prepare = False + if field.type_.__class__ == ForwardRef: + prepare = True + field.type_ = evaluate_forwardref(field.type_, globalns, localns or None) + if field.outer_type_.__class__ == ForwardRef: + prepare = True + field.outer_type_ = evaluate_forwardref(field.outer_type_, globalns, localns or None) + if prepare: + field.prepare() + + if field.sub_fields: + for sub_f in field.sub_fields: + update_field_forward_refs(sub_f, globalns=globalns, localns=localns) + + if field.discriminator_key is not None: + field.prepare_discriminated_union_sub_fields() + + +def update_model_forward_refs( + model: Type[Any], + fields: Iterable['ModelField'], + json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable], + localns: 'DictStrAny', + exc_to_suppress: Tuple[Type[BaseException], ...] = (), +) -> None: + """ + Try to update model fields ForwardRefs based on model and localns. 
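+
+    For example, `BaseModel.update_forward_refs()` delegates here, so string
+    annotations on self-referencing models can be resolved once the class exists.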
+ """ + if model.__module__ in sys.modules: + globalns = sys.modules[model.__module__].__dict__.copy() + else: + globalns = {} + + globalns.setdefault(model.__name__, model) + + for f in fields: + try: + update_field_forward_refs(f, globalns=globalns, localns=localns) + except exc_to_suppress: + pass + + for key in set(json_encoders.keys()): + if isinstance(key, str): + fr: ForwardRef = ForwardRef(key) + elif isinstance(key, ForwardRef): + fr = key + else: + continue + + try: + new_key = evaluate_forwardref(fr, globalns, localns or None) + except exc_to_suppress: # pragma: no cover + continue + + json_encoders[new_key] = json_encoders.pop(key) + + +def get_class(type_: Type[Any]) -> Union[None, bool, Type[Any]]: + """ + Tries to get the class of a Type[T] annotation. Returns True if Type is used + without brackets. Otherwise returns None. + """ + if type_ is type: + return True + + if get_origin(type_) is None: + return None + + args = get_args(type_) + if not args or not isinstance(args[0], type): + return True + else: + return args[0] + + +def get_sub_types(tp: Any) -> List[Any]: + """ + Return all the types that are allowed by type `tp` + `tp` can be a `Union` of allowed types or an `Annotated` type + """ + origin = get_origin(tp) + if origin is Annotated: + return get_sub_types(get_args(tp)[0]) + elif is_union(origin): + return [x for t in get_args(tp) for x in get_sub_types(t)] + else: + return [tp] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/utils.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/utils.cp37-win_amd64.pyd new file mode 100644 index 00000000..a2578e3b Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/utils.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/utils.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/utils.py new file mode 100644 index 00000000..1d016c0e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/utils.py @@ -0,0 +1,841 @@ +import keyword +import warnings +import weakref +from collections import OrderedDict, defaultdict, deque +from copy import deepcopy +from itertools import islice, zip_longest +from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType +from typing import ( + TYPE_CHECKING, + AbstractSet, + Any, + Callable, + Collection, + Dict, + Generator, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + NoReturn, + Optional, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +from typing_extensions import Annotated + +from .errors import ConfigError +from .typing import ( + NoneType, + WithArgsTypes, + all_literal_values, + display_as_type, + get_args, + get_origin, + is_literal_type, + is_union, +) +from .version import version_info + +if TYPE_CHECKING: + from inspect import Signature + from pathlib import Path + + from .config import BaseConfig + from .dataclasses import Dataclass + from .fields import ModelField + from .main import BaseModel + from .typing import AbstractSetIntStr, DictIntStrAny, IntStr, MappingIntStrAny, ReprArgs + + RichReprResult = Iterable[Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]]] + +__all__ = ( + 'import_string', + 'sequence_like', + 'validate_field_name', + 'lenient_isinstance', + 'lenient_issubclass', + 'in_ipython', + 'is_valid_identifier', + 'deep_update', + 'update_not_none', + 'almost_equal_floats', + 
'get_model', + 'to_camel', + 'is_valid_field', + 'smart_deepcopy', + 'PyObjectStr', + 'Representation', + 'GetterDict', + 'ValueItems', + 'version_info', # required here to match behaviour in v1.3 + 'ClassAttribute', + 'path_type', + 'ROOT_KEY', + 'get_unique_discriminator_alias', + 'get_discriminator_alias_and_values', + 'DUNDER_ATTRIBUTES', + 'LimitedDict', +) + +ROOT_KEY = '__root__' +# these are types that are returned unchanged by deepcopy +IMMUTABLE_NON_COLLECTIONS_TYPES: Set[Type[Any]] = { + int, + float, + complex, + str, + bool, + bytes, + type, + NoneType, + FunctionType, + BuiltinFunctionType, + LambdaType, + weakref.ref, + CodeType, + # note: including ModuleType will differ from behaviour of deepcopy by not producing error. + # It might be not a good idea in general, but considering that this function used only internally + # against default values of fields, this will allow to actually have a field with module as default value + ModuleType, + NotImplemented.__class__, + Ellipsis.__class__, +} + +# these are types that if empty, might be copied with simple copy() instead of deepcopy() +BUILTIN_COLLECTIONS: Set[Type[Any]] = { + list, + set, + tuple, + frozenset, + dict, + OrderedDict, + defaultdict, + deque, +} + + +def import_string(dotted_path: str) -> Any: + """ + Stolen approximately from django. Import a dotted module path and return the attribute/class designated by the + last name in the path. Raise ImportError if the import fails. + """ + from importlib import import_module + + try: + module_path, class_name = dotted_path.strip(' ').rsplit('.', 1) + except ValueError as e: + raise ImportError(f'"{dotted_path}" doesn\'t look like a module path') from e + + module = import_module(module_path) + try: + return getattr(module, class_name) + except AttributeError as e: + raise ImportError(f'Module "{module_path}" does not define a "{class_name}" attribute') from e + + +def truncate(v: Union[str], *, max_len: int = 80) -> str: + """ + Truncate a value and add a unicode ellipsis (three dots) to the end if it was too long + """ + warnings.warn('`truncate` is no-longer used by pydantic and is deprecated', DeprecationWarning) + if isinstance(v, str) and len(v) > (max_len - 2): + # -3 so quote + string + … + quote has correct length + return (v[: (max_len - 3)] + '…').__repr__() + try: + v = v.__repr__() + except TypeError: + v = v.__class__.__repr__(v) # in case v is a type + if len(v) > max_len: + v = v[: max_len - 1] + '…' + return v + + +def sequence_like(v: Any) -> bool: + return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque)) + + +def validate_field_name(bases: List[Type['BaseModel']], field_name: str) -> None: + """ + Ensure that the field's name does not shadow an existing attribute of the model. + """ + for base in bases: + if getattr(base, field_name, None): + raise NameError( + f'Field name "{field_name}" shadows a BaseModel attribute; ' + f'use a different field name with "alias=\'{field_name}\'".' 
+ ) + + +def lenient_isinstance(o: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool: + try: + return isinstance(o, class_or_tuple) # type: ignore[arg-type] + except TypeError: + return False + + +def lenient_issubclass(cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool: + try: + return isinstance(cls, type) and issubclass(cls, class_or_tuple) # type: ignore[arg-type] + except TypeError: + if isinstance(cls, WithArgsTypes): + return False + raise # pragma: no cover + + +def in_ipython() -> bool: + """ + Check whether we're in an ipython environment, including jupyter notebooks. + """ + try: + eval('__IPYTHON__') + except NameError: + return False + else: # pragma: no cover + return True + + +def is_valid_identifier(identifier: str) -> bool: + """ + Checks that a string is a valid identifier and not a Python keyword. + :param identifier: The identifier to test. + :return: True if the identifier is valid. + """ + return identifier.isidentifier() and not keyword.iskeyword(identifier) + + +KeyType = TypeVar('KeyType') + + +def deep_update(mapping: Dict[KeyType, Any], *updating_mappings: Dict[KeyType, Any]) -> Dict[KeyType, Any]: + updated_mapping = mapping.copy() + for updating_mapping in updating_mappings: + for k, v in updating_mapping.items(): + if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict): + updated_mapping[k] = deep_update(updated_mapping[k], v) + else: + updated_mapping[k] = v + return updated_mapping + + +def update_not_none(mapping: Dict[Any, Any], **update: Any) -> None: + mapping.update({k: v for k, v in update.items() if v is not None}) + + +def almost_equal_floats(value_1: float, value_2: float, *, delta: float = 1e-8) -> bool: + """ + Return True if two floats are almost equal + """ + return abs(value_1 - value_2) <= delta + + +def generate_model_signature( + init: Callable[..., None], fields: Dict[str, 'ModelField'], config: Type['BaseConfig'] +) -> 'Signature': + """ + Generate signature for model based on its fields + """ + from inspect import Parameter, Signature, signature + + from .config import Extra + + present_params = signature(init).parameters.values() + merged_params: Dict[str, Parameter] = {} + var_kw = None + use_var_kw = False + + for param in islice(present_params, 1, None): # skip self arg + if param.kind is param.VAR_KEYWORD: + var_kw = param + continue + merged_params[param.name] = param + + if var_kw: # if custom init has no var_kw, fields which are not declared in it cannot be passed through + allow_names = config.allow_population_by_field_name + for field_name, field in fields.items(): + param_name = field.alias + if field_name in merged_params or param_name in merged_params: + continue + elif not is_valid_identifier(param_name): + if allow_names and is_valid_identifier(field_name): + param_name = field_name + else: + use_var_kw = True + continue + + # TODO: replace annotation with actual expected types once #1055 solved + kwargs = {'default': field.default} if not field.required else {} + merged_params[param_name] = Parameter( + param_name, Parameter.KEYWORD_ONLY, annotation=field.annotation, **kwargs + ) + + if config.extra is Extra.allow: + use_var_kw = True + + if var_kw and use_var_kw: + # Make sure the parameter for extra kwargs + # does not have the same name as a field + default_model_signature = [ + ('__pydantic_self__', Parameter.POSITIONAL_OR_KEYWORD), + ('data', Parameter.VAR_KEYWORD), + ] + if [(p.name, p.kind) for p in present_params] == 
default_model_signature: + # if this is the standard model signature, use extra_data as the extra args name + var_kw_name = 'extra_data' + else: + # else start from var_kw + var_kw_name = var_kw.name + + # generate a name that's definitely unique + while var_kw_name in fields: + var_kw_name += '_' + merged_params[var_kw_name] = var_kw.replace(name=var_kw_name) + + return Signature(parameters=list(merged_params.values()), return_annotation=None) + + +def get_model(obj: Union[Type['BaseModel'], Type['Dataclass']]) -> Type['BaseModel']: + from .main import BaseModel + + try: + model_cls = obj.__pydantic_model__ # type: ignore + except AttributeError: + model_cls = obj + + if not issubclass(model_cls, BaseModel): + raise TypeError('Unsupported type, must be either BaseModel or dataclass') + return model_cls + + +def to_camel(string: str) -> str: + return ''.join(word.capitalize() for word in string.split('_')) + + +def to_lower_camel(string: str) -> str: + if len(string) >= 1: + pascal_string = to_camel(string) + return pascal_string[0].lower() + pascal_string[1:] + return string.lower() + + +T = TypeVar('T') + + +def unique_list( + input_list: Union[List[T], Tuple[T, ...]], + *, + name_factory: Callable[[T], str] = str, +) -> List[T]: + """ + Make a list unique while maintaining order. + We update the list if another one with the same name is set + (e.g. root validator overridden in subclass) + """ + result: List[T] = [] + result_names: List[str] = [] + for v in input_list: + v_name = name_factory(v) + if v_name not in result_names: + result_names.append(v_name) + result.append(v) + else: + result[result_names.index(v_name)] = v + + return result + + +class PyObjectStr(str): + """ + String class where repr doesn't include quotes. Useful with Representation when you want to return a string + representation of something that valid (or pseudo-valid) python. + """ + + def __repr__(self) -> str: + return str(self) + + +class Representation: + """ + Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details. + + __pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations + of objects. + """ + + __slots__: Tuple[str, ...] = tuple() + + def __repr_args__(self) -> 'ReprArgs': + """ + Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden. + + Can either return: + * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]` + * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]` + """ + attrs = ((s, getattr(self, s)) for s in self.__slots__) + return [(a, v) for a, v in attrs if v is not None] + + def __repr_name__(self) -> str: + """ + Name of the instance's class, used in __repr__. 
+ """ + return self.__class__.__name__ + + def __repr_str__(self, join_str: str) -> str: + return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__()) + + def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any, None, None]: + """ + Used by devtools (https://python-devtools.helpmanual.io/) to provide a human readable representations of objects + """ + yield self.__repr_name__() + '(' + yield 1 + for name, value in self.__repr_args__(): + if name is not None: + yield name + '=' + yield fmt(value) + yield ',' + yield 0 + yield -1 + yield ')' + + def __str__(self) -> str: + return self.__repr_str__(' ') + + def __repr__(self) -> str: + return f'{self.__repr_name__()}({self.__repr_str__(", ")})' + + def __rich_repr__(self) -> 'RichReprResult': + """Get fields for Rich library""" + for name, field_repr in self.__repr_args__(): + if name is None: + yield field_repr + else: + yield name, field_repr + + +class GetterDict(Representation): + """ + Hack to make object's smell just enough like dicts for validate_model. + + We can't inherit from Mapping[str, Any] because it upsets cython so we have to implement all methods ourselves. + """ + + __slots__ = ('_obj',) + + def __init__(self, obj: Any): + self._obj = obj + + def __getitem__(self, key: str) -> Any: + try: + return getattr(self._obj, key) + except AttributeError as e: + raise KeyError(key) from e + + def get(self, key: Any, default: Any = None) -> Any: + return getattr(self._obj, key, default) + + def extra_keys(self) -> Set[Any]: + """ + We don't want to get any other attributes of obj if the model didn't explicitly ask for them + """ + return set() + + def keys(self) -> List[Any]: + """ + Keys of the pseudo dictionary, uses a list not set so order information can be maintained like python + dictionaries. + """ + return list(self) + + def values(self) -> List[Any]: + return [self[k] for k in self] + + def items(self) -> Iterator[Tuple[str, Any]]: + for k in self: + yield k, self.get(k) + + def __iter__(self) -> Iterator[str]: + for name in dir(self._obj): + if not name.startswith('_'): + yield name + + def __len__(self) -> int: + return sum(1 for _ in self) + + def __contains__(self, item: Any) -> bool: + return item in self.keys() + + def __eq__(self, other: Any) -> bool: + return dict(self) == dict(other.items()) + + def __repr_args__(self) -> 'ReprArgs': + return [(None, dict(self))] + + def __repr_name__(self) -> str: + return f'GetterDict[{display_as_type(self._obj)}]' + + +class ValueItems(Representation): + """ + Class for more convenient calculation of excluded or included fields on values. + """ + + __slots__ = ('_items', '_type') + + def __init__(self, value: Any, items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> None: + items = self._coerce_items(items) + + if isinstance(value, (list, tuple)): + items = self._normalize_indexes(items, len(value)) + + self._items: 'MappingIntStrAny' = items + + def is_excluded(self, item: Any) -> bool: + """ + Check if item is fully excluded. 
+ + :param item: key or index of a value + """ + return self.is_true(self._items.get(item)) + + def is_included(self, item: Any) -> bool: + """ + Check if value is contained in self._items + + :param item: key or index of value + """ + return item in self._items + + def for_element(self, e: 'IntStr') -> Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']]: + """ + :param e: key or index of element on value + :return: raw values for element if self._items is dict and contain needed element + """ + + item = self._items.get(e) + return item if not self.is_true(item) else None + + def _normalize_indexes(self, items: 'MappingIntStrAny', v_length: int) -> 'DictIntStrAny': + """ + :param items: dict or set of indexes which will be normalized + :param v_length: length of sequence indexes of which will be + + >>> self._normalize_indexes({0: True, -2: True, -1: True}, 4) + {0: True, 2: True, 3: True} + >>> self._normalize_indexes({'__all__': True}, 4) + {0: True, 1: True, 2: True, 3: True} + """ + + normalized_items: 'DictIntStrAny' = {} + all_items = None + for i, v in items.items(): + if not (isinstance(v, Mapping) or isinstance(v, AbstractSet) or self.is_true(v)): + raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}') + if i == '__all__': + all_items = self._coerce_value(v) + continue + if not isinstance(i, int): + raise TypeError( + 'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: ' + 'expected integer keys or keyword "__all__"' + ) + normalized_i = v_length + i if i < 0 else i + normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i)) + + if not all_items: + return normalized_items + if self.is_true(all_items): + for i in range(v_length): + normalized_items.setdefault(i, ...) + return normalized_items + for i in range(v_length): + normalized_item = normalized_items.setdefault(i, {}) + if not self.is_true(normalized_item): + normalized_items[i] = self.merge(all_items, normalized_item) + return normalized_items + + @classmethod + def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any: + """ + Merge a ``base`` item with an ``override`` item. + + Both ``base`` and ``override`` are converted to dictionaries if possible. + Sets are converted to dictionaries with the sets entries as keys and + Ellipsis as values. + + Each key-value pair existing in ``base`` is merged with ``override``, + while the rest of the key-value pairs are updated recursively with this function. + + Merging takes place based on the "union" of keys if ``intersect`` is + set to ``False`` (default) and on the intersection of keys if + ``intersect`` is set to ``True``. 
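+
+        For instance, `merge({'a'}, {'a': {'b'}})` returns `{'a': {'b': ...}}`
+        (both sets are coerced to dicts with `...` values).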
+ """ + override = cls._coerce_value(override) + base = cls._coerce_value(base) + if override is None: + return base + if cls.is_true(base) or base is None: + return override + if cls.is_true(override): + return base if intersect else override + + # intersection or union of keys while preserving ordering: + if intersect: + merge_keys = [k for k in base if k in override] + [k for k in override if k in base] + else: + merge_keys = list(base) + [k for k in override if k not in base] + + merged: 'DictIntStrAny' = {} + for k in merge_keys: + merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect) + if merged_item is not None: + merged[k] = merged_item + + return merged + + @staticmethod + def _coerce_items(items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> 'MappingIntStrAny': + if isinstance(items, Mapping): + pass + elif isinstance(items, AbstractSet): + items = dict.fromkeys(items, ...) + else: + class_name = getattr(items, '__class__', '???') + assert_never( + items, + f'Unexpected type of exclude value {class_name}', + ) + return items + + @classmethod + def _coerce_value(cls, value: Any) -> Any: + if value is None or cls.is_true(value): + return value + return cls._coerce_items(value) + + @staticmethod + def is_true(v: Any) -> bool: + return v is True or v is ... + + def __repr_args__(self) -> 'ReprArgs': + return [(None, self._items)] + + +class ClassAttribute: + """ + Hide class attribute from its instances + """ + + __slots__ = ( + 'name', + 'value', + ) + + def __init__(self, name: str, value: Any) -> None: + self.name = name + self.value = value + + def __get__(self, instance: Any, owner: Type[Any]) -> None: + if instance is None: + return self.value + raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only') + + +path_types = { + 'is_dir': 'directory', + 'is_file': 'file', + 'is_mount': 'mount point', + 'is_symlink': 'symlink', + 'is_block_device': 'block device', + 'is_char_device': 'char device', + 'is_fifo': 'FIFO', + 'is_socket': 'socket', +} + + +def path_type(p: 'Path') -> str: + """ + Find out what sort of thing a path is. + """ + assert p.exists(), 'path does not exist' + for method, name in path_types.items(): + if getattr(p, method)(): + return name + + return 'unknown' + + +Obj = TypeVar('Obj') + + +def smart_deepcopy(obj: Obj) -> Obj: + """ + Return type as is for immutable built-in types + Use obj.copy() for built-in empty collections + Use copy.deepcopy() for non-empty collections and unknown objects + """ + + obj_type = obj.__class__ + if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES: + return obj # fastest case: obj is immutable and not collection therefore will not be copied anyway + try: + if not obj and obj_type in BUILTIN_COLLECTIONS: + # faster way for empty collections, no need to copy its members + return obj if obj_type is tuple else obj.copy() # type: ignore # tuple doesn't have copy method + except (TypeError, ValueError, RuntimeError): + # do we really dare to catch ALL errors? 
Seems a bit risky + pass + + return deepcopy(obj) # slowest way when we actually might need a deepcopy + + +def is_valid_field(name: str) -> bool: + if not name.startswith('_'): + return True + return ROOT_KEY == name + + +DUNDER_ATTRIBUTES = { + '__annotations__', + '__classcell__', + '__doc__', + '__module__', + '__orig_bases__', + '__orig_class__', + '__qualname__', +} + + +def is_valid_private_name(name: str) -> bool: + return not is_valid_field(name) and name not in DUNDER_ATTRIBUTES + + +_EMPTY = object() + + +def all_identical(left: Iterable[Any], right: Iterable[Any]) -> bool: + """ + Check that the items of `left` are the same objects as those in `right`. + + >>> a, b = object(), object() + >>> all_identical([a, b, a], [a, b, a]) + True + >>> all_identical([a, b, [a]], [a, b, [a]]) # new list object, while "equal" is not "identical" + False + """ + for left_item, right_item in zip_longest(left, right, fillvalue=_EMPTY): + if left_item is not right_item: + return False + return True + + +def assert_never(obj: NoReturn, msg: str) -> NoReturn: + """ + Helper to make sure that we have covered all possible types. + + This is mostly useful for ``mypy``, docs: + https://mypy.readthedocs.io/en/latest/literal_types.html#exhaustive-checks + """ + raise TypeError(msg) + + +def get_unique_discriminator_alias(all_aliases: Collection[str], discriminator_key: str) -> str: + """Validate that all aliases are the same and if that's the case return the alias""" + unique_aliases = set(all_aliases) + if len(unique_aliases) > 1: + raise ConfigError( + f'Aliases for discriminator {discriminator_key!r} must be the same (got {", ".join(sorted(all_aliases))})' + ) + return unique_aliases.pop() + + +def get_discriminator_alias_and_values(tp: Any, discriminator_key: str) -> Tuple[str, Tuple[str, ...]]: + """ + Get alias and all valid values in the `Literal` type of the discriminator field + `tp` can be a `BaseModel` class or directly an `Annotated` `Union` of many. 
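+    e.g. with `Annotated[Union[Cat, Dog], Field(discriminator='pet_type')]` where
+    `Cat.pet_type` is `Literal['cat']` and `Dog.pet_type` is `Literal['dog']`,
+    this returns `('pet_type', ('cat', 'dog'))`.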
+ """ + is_root_model = getattr(tp, '__custom_root_type__', False) + + if get_origin(tp) is Annotated: + tp = get_args(tp)[0] + + if hasattr(tp, '__pydantic_model__'): + tp = tp.__pydantic_model__ + + if is_union(get_origin(tp)): + alias, all_values = _get_union_alias_and_all_values(tp, discriminator_key) + return alias, tuple(v for values in all_values for v in values) + elif is_root_model: + union_type = tp.__fields__[ROOT_KEY].type_ + alias, all_values = _get_union_alias_and_all_values(union_type, discriminator_key) + + if len(set(all_values)) > 1: + raise ConfigError( + f'Field {discriminator_key!r} is not the same for all submodels of {display_as_type(tp)!r}' + ) + + return alias, all_values[0] + + else: + try: + t_discriminator_type = tp.__fields__[discriminator_key].type_ + except AttributeError as e: + raise TypeError(f'Type {tp.__name__!r} is not a valid `BaseModel` or `dataclass`') from e + except KeyError as e: + raise ConfigError(f'Model {tp.__name__!r} needs a discriminator field for key {discriminator_key!r}') from e + + if not is_literal_type(t_discriminator_type): + raise ConfigError(f'Field {discriminator_key!r} of model {tp.__name__!r} needs to be a `Literal`') + + return tp.__fields__[discriminator_key].alias, all_literal_values(t_discriminator_type) + + +def _get_union_alias_and_all_values( + union_type: Type[Any], discriminator_key: str +) -> Tuple[str, Tuple[Tuple[str, ...], ...]]: + zipped_aliases_values = [get_discriminator_alias_and_values(t, discriminator_key) for t in get_args(union_type)] + # unzip: [('alias_a',('v1', 'v2)), ('alias_b', ('v3',))] => [('alias_a', 'alias_b'), (('v1', 'v2'), ('v3',))] + all_aliases, all_values = zip(*zipped_aliases_values) + return get_unique_discriminator_alias(all_aliases, discriminator_key), all_values + + +KT = TypeVar('KT') +VT = TypeVar('VT') +if TYPE_CHECKING: + # Annoying inheriting from `MutableMapping` and `dict` breaks cython, hence this work around + class LimitedDict(dict, MutableMapping[KT, VT]): # type: ignore[type-arg] + def __init__(self, size_limit: int = 1000): + ... + +else: + + class LimitedDict(dict): + """ + Limit the size/length of a dict used for caching to avoid unlimited increase in memory usage. + + Since the dict is ordered, and we always remove elements from the beginning, this is effectively a FIFO cache. + + Annoying inheriting from `MutableMapping` breaks cython. 
+ """ + + def __init__(self, size_limit: int = 1000): + self.size_limit = size_limit + super().__init__() + + def __setitem__(self, __key: Any, __value: Any) -> None: + super().__setitem__(__key, __value) + if len(self) > self.size_limit: + excess = len(self) - self.size_limit + self.size_limit // 10 + to_remove = list(self.keys())[:excess] + for key in to_remove: + del self[key] + + def __class_getitem__(cls, *args: Any) -> Any: + # to avoid errors with 3.7 + pass diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/validators.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/validators.cp37-win_amd64.pyd new file mode 100644 index 00000000..7bc532b3 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/validators.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/validators.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/validators.py new file mode 100644 index 00000000..1c19fc92 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/validators.py @@ -0,0 +1,752 @@ +import math +import re +from collections import OrderedDict, deque +from collections.abc import Hashable as CollectionsHashable +from datetime import date, datetime, time, timedelta +from decimal import Decimal, DecimalException +from enum import Enum, IntEnum +from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Deque, + Dict, + ForwardRef, + FrozenSet, + Generator, + Hashable, + List, + NamedTuple, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, +) +from uuid import UUID + +from . import errors +from .datetime_parse import parse_date, parse_datetime, parse_duration, parse_time +from .typing import ( + AnyCallable, + all_literal_values, + display_as_type, + get_class, + is_callable_type, + is_literal_type, + is_namedtuple, + is_none_type, + is_typeddict, +) +from .utils import almost_equal_floats, lenient_issubclass, sequence_like + +if TYPE_CHECKING: + from typing_extensions import Literal, TypedDict + + from .config import BaseConfig + from .fields import ModelField + from .types import ConstrainedDecimal, ConstrainedFloat, ConstrainedInt + + ConstrainedNumber = Union[ConstrainedDecimal, ConstrainedFloat, ConstrainedInt] + AnyOrderedDict = OrderedDict[Any, Any] + Number = Union[int, float, Decimal] + StrBytes = Union[str, bytes] + + +def str_validator(v: Any) -> Union[str]: + if isinstance(v, str): + if isinstance(v, Enum): + return v.value + else: + return v + elif isinstance(v, (float, int, Decimal)): + # is there anything else we want to add here? If you think so, create an issue. 
+ return str(v) + elif isinstance(v, (bytes, bytearray)): + return v.decode() + else: + raise errors.StrError() + + +def strict_str_validator(v: Any) -> Union[str]: + if isinstance(v, str) and not isinstance(v, Enum): + return v + raise errors.StrError() + + +def bytes_validator(v: Any) -> Union[bytes]: + if isinstance(v, bytes): + return v + elif isinstance(v, bytearray): + return bytes(v) + elif isinstance(v, str): + return v.encode() + elif isinstance(v, (float, int, Decimal)): + return str(v).encode() + else: + raise errors.BytesError() + + +def strict_bytes_validator(v: Any) -> Union[bytes]: + if isinstance(v, bytes): + return v + elif isinstance(v, bytearray): + return bytes(v) + else: + raise errors.BytesError() + + +BOOL_FALSE = {0, '0', 'off', 'f', 'false', 'n', 'no'} +BOOL_TRUE = {1, '1', 'on', 't', 'true', 'y', 'yes'} + + +def bool_validator(v: Any) -> bool: + if v is True or v is False: + return v + if isinstance(v, bytes): + v = v.decode() + if isinstance(v, str): + v = v.lower() + try: + if v in BOOL_TRUE: + return True + if v in BOOL_FALSE: + return False + except TypeError: + raise errors.BoolError() + raise errors.BoolError() + + +def int_validator(v: Any) -> int: + if isinstance(v, int) and not (v is True or v is False): + return v + + try: + return int(v) + except (TypeError, ValueError, OverflowError): + raise errors.IntegerError() + + +def strict_int_validator(v: Any) -> int: + if isinstance(v, int) and not (v is True or v is False): + return v + raise errors.IntegerError() + + +def float_validator(v: Any) -> float: + if isinstance(v, float): + return v + + try: + return float(v) + except (TypeError, ValueError): + raise errors.FloatError() + + +def strict_float_validator(v: Any) -> float: + if isinstance(v, float): + return v + raise errors.FloatError() + + +def float_finite_validator(v: 'Number', field: 'ModelField', config: 'BaseConfig') -> 'Number': + allow_inf_nan = getattr(field.type_, 'allow_inf_nan', None) + if allow_inf_nan is None: + allow_inf_nan = config.allow_inf_nan + + if allow_inf_nan is False and (math.isnan(v) or math.isinf(v)): + raise errors.NumberNotFiniteError() + return v + + +def number_multiple_validator(v: 'Number', field: 'ModelField') -> 'Number': + field_type: ConstrainedNumber = field.type_ + if field_type.multiple_of is not None: + mod = float(v) / float(field_type.multiple_of) % 1 + if not almost_equal_floats(mod, 0.0) and not almost_equal_floats(mod, 1.0): + raise errors.NumberNotMultipleError(multiple_of=field_type.multiple_of) + return v + + +def number_size_validator(v: 'Number', field: 'ModelField') -> 'Number': + field_type: ConstrainedNumber = field.type_ + if field_type.gt is not None and not v > field_type.gt: + raise errors.NumberNotGtError(limit_value=field_type.gt) + elif field_type.ge is not None and not v >= field_type.ge: + raise errors.NumberNotGeError(limit_value=field_type.ge) + + if field_type.lt is not None and not v < field_type.lt: + raise errors.NumberNotLtError(limit_value=field_type.lt) + if field_type.le is not None and not v <= field_type.le: + raise errors.NumberNotLeError(limit_value=field_type.le) + + return v + + +def constant_validator(v: 'Any', field: 'ModelField') -> 'Any': + """Validate ``const`` fields. + + The value provided for a ``const`` field must be equal to the default value + of the field. This is to support the keyword of the same name in JSON + Schema. 
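+
+    For example, a hypothetical field declared as ``x: int = Field(42, const=True)``
+    accepts only ``42``; any other input raises ``WrongConstantError``.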
+ """ + if v != field.default: + raise errors.WrongConstantError(given=v, permitted=[field.default]) + + return v + + +def anystr_length_validator(v: 'StrBytes', config: 'BaseConfig') -> 'StrBytes': + v_len = len(v) + + min_length = config.min_anystr_length + if v_len < min_length: + raise errors.AnyStrMinLengthError(limit_value=min_length) + + max_length = config.max_anystr_length + if max_length is not None and v_len > max_length: + raise errors.AnyStrMaxLengthError(limit_value=max_length) + + return v + + +def anystr_strip_whitespace(v: 'StrBytes') -> 'StrBytes': + return v.strip() + + +def anystr_upper(v: 'StrBytes') -> 'StrBytes': + return v.upper() + + +def anystr_lower(v: 'StrBytes') -> 'StrBytes': + return v.lower() + + +def ordered_dict_validator(v: Any) -> 'AnyOrderedDict': + if isinstance(v, OrderedDict): + return v + + try: + return OrderedDict(v) + except (TypeError, ValueError): + raise errors.DictError() + + +def dict_validator(v: Any) -> Dict[Any, Any]: + if isinstance(v, dict): + return v + + try: + return dict(v) + except (TypeError, ValueError): + raise errors.DictError() + + +def list_validator(v: Any) -> List[Any]: + if isinstance(v, list): + return v + elif sequence_like(v): + return list(v) + else: + raise errors.ListError() + + +def tuple_validator(v: Any) -> Tuple[Any, ...]: + if isinstance(v, tuple): + return v + elif sequence_like(v): + return tuple(v) + else: + raise errors.TupleError() + + +def set_validator(v: Any) -> Set[Any]: + if isinstance(v, set): + return v + elif sequence_like(v): + return set(v) + else: + raise errors.SetError() + + +def frozenset_validator(v: Any) -> FrozenSet[Any]: + if isinstance(v, frozenset): + return v + elif sequence_like(v): + return frozenset(v) + else: + raise errors.FrozenSetError() + + +def deque_validator(v: Any) -> Deque[Any]: + if isinstance(v, deque): + return v + elif sequence_like(v): + return deque(v) + else: + raise errors.DequeError() + + +def enum_member_validator(v: Any, field: 'ModelField', config: 'BaseConfig') -> Enum: + try: + enum_v = field.type_(v) + except ValueError: + # field.type_ should be an enum, so will be iterable + raise errors.EnumMemberError(enum_values=list(field.type_)) + return enum_v.value if config.use_enum_values else enum_v + + +def uuid_validator(v: Any, field: 'ModelField') -> UUID: + try: + if isinstance(v, str): + v = UUID(v) + elif isinstance(v, (bytes, bytearray)): + try: + v = UUID(v.decode()) + except ValueError: + # 16 bytes in big-endian order as the bytes argument fail + # the above check + v = UUID(bytes=v) + except ValueError: + raise errors.UUIDError() + + if not isinstance(v, UUID): + raise errors.UUIDError() + + required_version = getattr(field.type_, '_required_version', None) + if required_version and v.version != required_version: + raise errors.UUIDVersionError(required_version=required_version) + + return v + + +def decimal_validator(v: Any) -> Decimal: + if isinstance(v, Decimal): + return v + elif isinstance(v, (bytes, bytearray)): + v = v.decode() + + v = str(v).strip() + + try: + v = Decimal(v) + except DecimalException: + raise errors.DecimalError() + + if not v.is_finite(): + raise errors.DecimalIsNotFiniteError() + + return v + + +def hashable_validator(v: Any) -> Hashable: + if isinstance(v, Hashable): + return v + + raise errors.HashableError() + + +def ip_v4_address_validator(v: Any) -> IPv4Address: + if isinstance(v, IPv4Address): + return v + + try: + return IPv4Address(v) + except ValueError: + raise errors.IPv4AddressError() + + +def 
ip_v6_address_validator(v: Any) -> IPv6Address: + if isinstance(v, IPv6Address): + return v + + try: + return IPv6Address(v) + except ValueError: + raise errors.IPv6AddressError() + + +def ip_v4_network_validator(v: Any) -> IPv4Network: + """ + Assume IPv4Network initialised with a default ``strict`` argument + + See more: + https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network + """ + if isinstance(v, IPv4Network): + return v + + try: + return IPv4Network(v) + except ValueError: + raise errors.IPv4NetworkError() + + +def ip_v6_network_validator(v: Any) -> IPv6Network: + """ + Assume IPv6Network initialised with a default ``strict`` argument + + See more: + https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network + """ + if isinstance(v, IPv6Network): + return v + + try: + return IPv6Network(v) + except ValueError: + raise errors.IPv6NetworkError() + + +def ip_v4_interface_validator(v: Any) -> IPv4Interface: + if isinstance(v, IPv4Interface): + return v + + try: + return IPv4Interface(v) + except ValueError: + raise errors.IPv4InterfaceError() + + +def ip_v6_interface_validator(v: Any) -> IPv6Interface: + if isinstance(v, IPv6Interface): + return v + + try: + return IPv6Interface(v) + except ValueError: + raise errors.IPv6InterfaceError() + + +def path_validator(v: Any) -> Path: + if isinstance(v, Path): + return v + + try: + return Path(v) + except TypeError: + raise errors.PathError() + + +def path_exists_validator(v: Any) -> Path: + if not v.exists(): + raise errors.PathNotExistsError(path=v) + + return v + + +def callable_validator(v: Any) -> AnyCallable: + """ + Perform a simple check if the value is callable. + + Note: complete matching of argument type hints and return types is not performed + """ + if callable(v): + return v + + raise errors.CallableError(value=v) + + +def enum_validator(v: Any) -> Enum: + if isinstance(v, Enum): + return v + + raise errors.EnumError(value=v) + + +def int_enum_validator(v: Any) -> IntEnum: + if isinstance(v, IntEnum): + return v + + raise errors.IntEnumError(value=v) + + +def make_literal_validator(type_: Any) -> Callable[[Any], Any]: + permitted_choices = all_literal_values(type_) + + # To have a O(1) complexity and still return one of the values set inside the `Literal`, + # we create a dict with the set values (a set causes some problems with the way intersection works). 
+ # In some cases the set value and checked value can indeed be different (see `test_literal_validator_str_enum`) + allowed_choices = {v: v for v in permitted_choices} + + def literal_validator(v: Any) -> Any: + try: + return allowed_choices[v] + except KeyError: + raise errors.WrongConstantError(given=v, permitted=permitted_choices) + + return literal_validator + + +def constr_length_validator(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': + v_len = len(v) + + min_length = field.type_.min_length if field.type_.min_length is not None else config.min_anystr_length + if v_len < min_length: + raise errors.AnyStrMinLengthError(limit_value=min_length) + + max_length = field.type_.max_length if field.type_.max_length is not None else config.max_anystr_length + if max_length is not None and v_len > max_length: + raise errors.AnyStrMaxLengthError(limit_value=max_length) + + return v + + +def constr_strip_whitespace(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': + strip_whitespace = field.type_.strip_whitespace or config.anystr_strip_whitespace + if strip_whitespace: + v = v.strip() + + return v + + +def constr_upper(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': + upper = field.type_.to_upper or config.anystr_upper + if upper: + v = v.upper() + + return v + + +def constr_lower(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': + lower = field.type_.to_lower or config.anystr_lower + if lower: + v = v.lower() + return v + + +def validate_json(v: Any, config: 'BaseConfig') -> Any: + if v is None: + # pass None through to other validators + return v + try: + return config.json_loads(v) # type: ignore + except ValueError: + raise errors.JsonError() + except TypeError: + raise errors.JsonTypeError() + + +T = TypeVar('T') + + +def make_arbitrary_type_validator(type_: Type[T]) -> Callable[[T], T]: + def arbitrary_type_validator(v: Any) -> T: + if isinstance(v, type_): + return v + raise errors.ArbitraryTypeError(expected_arbitrary_type=type_) + + return arbitrary_type_validator + + +def make_class_validator(type_: Type[T]) -> Callable[[Any], Type[T]]: + def class_validator(v: Any) -> Type[T]: + if lenient_issubclass(v, type_): + return v + raise errors.SubclassError(expected_class=type_) + + return class_validator + + +def any_class_validator(v: Any) -> Type[T]: + if isinstance(v, type): + return v + raise errors.ClassError() + + +def none_validator(v: Any) -> 'Literal[None]': + if v is None: + return v + raise errors.NotNoneError() + + +def pattern_validator(v: Any) -> Pattern[str]: + if isinstance(v, Pattern): + return v + + str_value = str_validator(v) + + try: + return re.compile(str_value) + except re.error: + raise errors.PatternError() + + +NamedTupleT = TypeVar('NamedTupleT', bound=NamedTuple) + + +def make_namedtuple_validator( + namedtuple_cls: Type[NamedTupleT], config: Type['BaseConfig'] +) -> Callable[[Tuple[Any, ...]], NamedTupleT]: + from .annotated_types import create_model_from_namedtuple + + NamedTupleModel = create_model_from_namedtuple( + namedtuple_cls, + __config__=config, + __module__=namedtuple_cls.__module__, + ) + namedtuple_cls.__pydantic_model__ = NamedTupleModel # type: ignore[attr-defined] + + def namedtuple_validator(values: Tuple[Any, ...]) -> NamedTupleT: + annotations = NamedTupleModel.__annotations__ + + if len(values) > len(annotations): + raise errors.ListMaxLengthError(limit_value=len(annotations)) + + dict_values: Dict[str, Any] = dict(zip(annotations, values)) + 
validated_dict_values: Dict[str, Any] = dict(NamedTupleModel(**dict_values)) + return namedtuple_cls(**validated_dict_values) + + return namedtuple_validator + + +def make_typeddict_validator( + typeddict_cls: Type['TypedDict'], config: Type['BaseConfig'] # type: ignore[valid-type] +) -> Callable[[Any], Dict[str, Any]]: + from .annotated_types import create_model_from_typeddict + + TypedDictModel = create_model_from_typeddict( + typeddict_cls, + __config__=config, + __module__=typeddict_cls.__module__, + ) + typeddict_cls.__pydantic_model__ = TypedDictModel # type: ignore[attr-defined] + + def typeddict_validator(values: 'TypedDict') -> Dict[str, Any]: # type: ignore[valid-type] + return TypedDictModel.parse_obj(values).dict(exclude_unset=True) + + return typeddict_validator + + +class IfConfig: + def __init__(self, validator: AnyCallable, *config_attr_names: str, ignored_value: Any = False) -> None: + self.validator = validator + self.config_attr_names = config_attr_names + self.ignored_value = ignored_value + + def check(self, config: Type['BaseConfig']) -> bool: + return any(getattr(config, name) not in {None, self.ignored_value} for name in self.config_attr_names) + + +# order is important here, for example: bool is a subclass of int so has to come first, datetime before date same, +# IPv4Interface before IPv4Address, etc +_VALIDATORS: List[Tuple[Type[Any], List[Any]]] = [ + (IntEnum, [int_validator, enum_member_validator]), + (Enum, [enum_member_validator]), + ( + str, + [ + str_validator, + IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'), + IfConfig(anystr_upper, 'anystr_upper'), + IfConfig(anystr_lower, 'anystr_lower'), + IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'), + ], + ), + ( + bytes, + [ + bytes_validator, + IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'), + IfConfig(anystr_upper, 'anystr_upper'), + IfConfig(anystr_lower, 'anystr_lower'), + IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'), + ], + ), + (bool, [bool_validator]), + (int, [int_validator]), + (float, [float_validator, IfConfig(float_finite_validator, 'allow_inf_nan', ignored_value=True)]), + (Path, [path_validator]), + (datetime, [parse_datetime]), + (date, [parse_date]), + (time, [parse_time]), + (timedelta, [parse_duration]), + (OrderedDict, [ordered_dict_validator]), + (dict, [dict_validator]), + (list, [list_validator]), + (tuple, [tuple_validator]), + (set, [set_validator]), + (frozenset, [frozenset_validator]), + (deque, [deque_validator]), + (UUID, [uuid_validator]), + (Decimal, [decimal_validator]), + (IPv4Interface, [ip_v4_interface_validator]), + (IPv6Interface, [ip_v6_interface_validator]), + (IPv4Address, [ip_v4_address_validator]), + (IPv6Address, [ip_v6_address_validator]), + (IPv4Network, [ip_v4_network_validator]), + (IPv6Network, [ip_v6_network_validator]), +] + + +def find_validators( # noqa: C901 (ignore complexity) + type_: Type[Any], config: Type['BaseConfig'] +) -> Generator[AnyCallable, None, None]: + from .dataclasses import is_builtin_dataclass, make_dataclass_validator + + if type_ is Any or type_ is object: + return + type_type = type_.__class__ + if type_type == ForwardRef or type_type == TypeVar: + return + + if is_none_type(type_): + yield none_validator + return + if type_ is Pattern or type_ is re.Pattern: + yield pattern_validator + return + if type_ is Hashable or type_ is CollectionsHashable: + yield hashable_validator + return + if is_callable_type(type_): + yield callable_validator + return + if 
is_literal_type(type_): + yield make_literal_validator(type_) + return + if is_builtin_dataclass(type_): + yield from make_dataclass_validator(type_, config) + return + if type_ is Enum: + yield enum_validator + return + if type_ is IntEnum: + yield int_enum_validator + return + if is_namedtuple(type_): + yield tuple_validator + yield make_namedtuple_validator(type_, config) + return + if is_typeddict(type_): + yield make_typeddict_validator(type_, config) + return + + class_ = get_class(type_) + if class_ is not None: + if class_ is not Any and isinstance(class_, type): + yield make_class_validator(class_) + else: + yield any_class_validator + return + + for val_type, validators in _VALIDATORS: + try: + if issubclass(type_, val_type): + for v in validators: + if isinstance(v, IfConfig): + if v.check(config): + yield v.validator + else: + yield v + return + except TypeError: + raise RuntimeError(f'error checking inheritance of {type_!r} (type: {display_as_type(type_)})') + + if config.arbitrary_types_allowed: + yield make_arbitrary_type_validator(type_) + else: + raise RuntimeError(f'no validator found for {type_}, see `arbitrary_types_allowed` in Config') diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/version.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/version.cp37-win_amd64.pyd new file mode 100644 index 00000000..08a99998 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/version.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/version.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/version.py new file mode 100644 index 00000000..2ec0b8d4 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/pydantic/version.py @@ -0,0 +1,38 @@ +__all__ = 'compiled', 'VERSION', 'version_info' + +VERSION = '1.10.1' + +try: + import cython # type: ignore +except ImportError: + compiled: bool = False +else: # pragma: no cover + try: + compiled = cython.compiled + except AttributeError: + compiled = False + + +def version_info() -> str: + import platform + import sys + from importlib import import_module + from pathlib import Path + + optional_deps = [] + for p in ('devtools', 'dotenv', 'email-validator', 'typing-extensions'): + try: + import_module(p.replace('-', '_')) + except ImportError: + continue + optional_deps.append(p) + + info = { + 'pydantic version': VERSION, + 'pydantic compiled': compiled, + 'install path': Path(__file__).resolve().parent, + 'python version': sys.version, + 'platform': platform.platform(), + 'optional deps. 
installed': optional_deps, + } + return '\n'.join('{:>30} {}'.format(k + ':', str(v).replace('\n', ' ')) for k, v in info.items()) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/LICENSE b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/LICENSE new file mode 100644 index 00000000..39372fee --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/LICENSE @@ -0,0 +1,87 @@ +python-dotenv +Copyright (c) 2014, Saurabh Kumar + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of python-dotenv nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +django-dotenv-rw +Copyright (c) 2013, Ted Tieken + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of django-dotenv nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Original django-dotenv +Copyright (c) 2013, Jacob Kaplan-Moss + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of django-dotenv nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/METADATA new file mode 100644 index 00000000..5d16fa86 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/METADATA @@ -0,0 +1,594 @@ +Metadata-Version: 2.1 +Name: python-dotenv +Version: 0.20.0 +Summary: Read key-value pairs from a .env file and set them as environment variables +Home-page: https://github.com/theskumar/python-dotenv +Author: Saurabh Kumar +Author-email: me+github@saurabh-kumar.com +License: BSD-3-Clause +Keywords: environment variables,deployments,settings,env,dotenv,configurations,python +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Topic :: System :: Systems Administration +Classifier: Topic :: Utilities +Classifier: Environment :: Web Environment +Requires-Python: >=3.5 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: cli +Requires-Dist: click (>=5.0) ; extra == 'cli' + +# python-dotenv + +[![Build Status][build_status_badge]][build_status_link] +[![PyPI version][pypi_badge]][pypi_link] + +Python-dotenv reads key-value pairs from a `.env` file and can set them as environment +variables. It helps in the development of applications following the +[12-factor](http://12factor.net/) principles. + +- [Getting Started](#getting-started) +- [Other Use Cases](#other-use-cases) + * [Load configuration without altering the environment](#load-configuration-without-altering-the-environment) + * [Parse configuration as a stream](#parse-configuration-as-a-stream) + * [Load .env files in IPython](#load-env-files-in-ipython) +- [Command-line Interface](#command-line-interface) +- [File format](#file-format) + * [Multiline values](#multiline-values) + * [Variable expansion](#variable-expansion) +- [Related Projects](#related-projects) +- [Acknowledgements](#acknowledgements) + +## Getting Started + +```shell +pip install python-dotenv +``` + +If your application takes its configuration from environment variables, like a 12-factor +application, launching it in development is not very practical because you have to set +those environment variables yourself. + +To help you with that, you can add Python-dotenv to your application to make it load the +configuration from a `.env` file when it is present (e.g. in development) while remaining +configurable via the environment: + +```python +from dotenv import load_dotenv + +load_dotenv() # take environment variables from .env. + +# Code of your application, which uses environment variables (e.g. from `os.environ` or +# `os.getenv`) as if they came from the actual environment. 
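+
+# For instance (an illustrative sketch; SECRET_KEY is a hypothetical entry in
+# your .env file, not something python-dotenv defines):
+import os
+
+SECRET_KEY = os.getenv("SECRET_KEY")  # the value from .env, or None if unset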
+``` + +By default, `load_dotenv` doesn't override existing environment variables. + +To configure the development environment, add a `.env` in the root directory of your +project: + +``` +. +├── .env +└── foo.py +``` + +The syntax of `.env` files supported by python-dotenv is similar to that of Bash: + +```bash +# Development settings +DOMAIN=example.org +ADMIN_EMAIL=admin@${DOMAIN} +ROOT_URL=${DOMAIN}/app +``` + +If you use variables in values, ensure they are surrounded with `{` and `}`, like +`${DOMAIN}`, as bare variables such as `$DOMAIN` are not expanded. + +You will probably want to add `.env` to your `.gitignore`, especially if it contains +secrets like a password. + +See the section "File format" below for more information about what you can write in a +`.env` file. + +## Other Use Cases + +### Load configuration without altering the environment + +The function `dotenv_values` works more or less the same way as `load_dotenv`, except it +doesn't touch the environment, it just returns a `dict` with the values parsed from the +`.env` file. + +```python +from dotenv import dotenv_values + +config = dotenv_values(".env") # config = {"USER": "foo", "EMAIL": "foo@example.org"} +``` + +This notably enables advanced configuration management: + +```python +import os +from dotenv import dotenv_values + +config = { + **dotenv_values(".env.shared"), # load shared development variables + **dotenv_values(".env.secret"), # load sensitive variables + **os.environ, # override loaded values with environment variables +} +``` + +### Parse configuration as a stream + +`load_dotenv` and `dotenv_values` accept [streams][python_streams] via their `stream` +argument. It is thus possible to load the variables from sources other than the +filesystem (e.g. the network). + +```python +from io import StringIO + +from dotenv import load_dotenv + +config = StringIO("USER=foo\nEMAIL=foo@example.org") +load_dotenv(stream=config) +``` + +### Load .env files in IPython + +You can use dotenv in IPython. By default, it will use `find_dotenv` to search for a +`.env` file: + +```python +%load_ext dotenv +%dotenv +``` + +You can also specify a path: + +```python +%dotenv relative/or/absolute/path/to/.env +``` + +Optional flags: + +- `-o` to override existing variables. +- `-v` for increased verbosity. + +## Command-line Interface + +A CLI interface `dotenv` is also included, which helps you manipulate the `.env` file +without manually opening it. + +```shell +$ pip install "python-dotenv[cli]" +$ dotenv set USER foo +$ dotenv set EMAIL foo@example.org +$ dotenv list +USER=foo +EMAIL=foo@example.org +$ dotenv run -- python foo.py +``` + +Run `dotenv --help` for more information about the options and subcommands. + +## File format + +The format is not formally specified and still improves over time. That being said, +`.env` files should mostly look like Bash files. + +Keys can be unquoted or single-quoted. Values can be unquoted, single- or double-quoted. +Spaces before and after keys, equal signs, and values are ignored. Values can be followed +by a comment. Lines can start with the `export` directive, which has no effect on their +interpretation. + +Allowed escape sequences: + +- in single-quoted values: `\\`, `\'` +- in double-quoted values: `\\`, `\'`, `\"`, `\a`, `\b`, `\f`, `\n`, `\r`, `\t`, `\v` + +### Multiline values + +It is possible for single- or double-quoted values to span multiple lines. 
The following +examples are equivalent: + +```bash +FOO="first line +second line" +``` + +```bash +FOO="first line\nsecond line" +``` + +### Variable expansion + +Python-dotenv can interpolate variables using POSIX variable expansion. + +With `load_dotenv(override=True)` or `dotenv_values()`, the value of a variable is the +first of the values defined in the following list: + +- Value of that variable in the `.env` file. +- Value of that variable in the environment. +- Default value, if provided. +- Empty string. + +With `load_dotenv(override=False)`, the value of a variable is the first of the values +defined in the following list: + +- Value of that variable in the environment. +- Value of that variable in the `.env` file. +- Default value, if provided. +- Empty string. + +## Related Projects + +- [Honcho](https://github.com/nickstenning/honcho) - For managing + Procfile-based applications. +- [django-dotenv](https://github.com/jpadilla/django-dotenv) +- [django-environ](https://github.com/joke2k/django-environ) +- [django-environ-2](https://github.com/sergeyklay/django-environ-2) +- [django-configuration](https://github.com/jezdez/django-configurations) +- [dump-env](https://github.com/sobolevn/dump-env) +- [environs](https://github.com/sloria/environs) +- [dynaconf](https://github.com/rochacbruno/dynaconf) + +## Acknowledgements + +This project is currently maintained by [Saurabh Kumar](https://saurabh-kumar.com) and +[Bertrand Bonnefoy-Claudet](https://github.com/bbc2) and would not have been possible +without the support of these [awesome +people](https://github.com/theskumar/python-dotenv/graphs/contributors). + +[build_status_badge]: https://github.com/theskumar/python-dotenv/actions/workflows/test.yml/badge.svg +[build_status_link]: https://github.com/theskumar/python-dotenv/actions/workflows/test.yml +[pypi_badge]: https://badge.fury.io/py/python-dotenv.svg +[pypi_link]: http://badge.fury.io/py/python-dotenv +[python_streams]: https://docs.python.org/3/library/io.html + +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this +project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [0.20.0] - 2022-03-24 + +### Added + +- Add `encoding` (`Optional[str]`) parameter to `get_key`, `set_key` and `unset_key`. + (#379 by [@bbc2]) + +### Fixed + +- Use dict to specify the `entry_points` parameter of `setuptools.setup` (#376 by + [@mgorny]). +- Don't build universal wheels (#387 by [@bbc2]). + +## [0.19.2] - 2021-11-11 + +### Fixed + +- In `set_key`, add missing newline character before new entry if necessary. (#361 by + [@bbc2]) + +## [0.19.1] - 2021-08-09 + +### Added + +- Add support for Python 3.10. (#359 by [@theskumar]) + +## [0.19.0] - 2021-07-24 + +### Changed + +- Require Python 3.5 or a later version. Python 2 and 3.4 are no longer supported. (#341 + by [@bbc2]). + +### Added + +- The `dotenv_path` argument of `set_key` and `unset_key` now has a type of `Union[str, + os.PathLike]` instead of just `os.PathLike` (#347 by [@bbc2]). +- The `stream` argument of `load_dotenv` and `dotenv_values` can now be a text stream + (`IO[str]`), which includes values like `io.StringIO("foo")` and `open("file.env", + "r")` (#348 by [@bbc2]). + +## [0.18.0] - 2021-06-20 + +### Changed + +- Raise `ValueError` if `quote_mode` isn't one of `always`, `auto` or `never` in + `set_key` (#330 by [@bbc2]). 
+- When writing a value to a .env file with `set_key` or `dotenv set <key> <value>` (#330
+  by [@bbc2]):
+  - Use single quotes instead of double quotes.
+  - Don't strip surrounding quotes.
+  - In `auto` mode, don't add quotes if the value is only made of alphanumeric characters
+    (as determined by `string.isalnum`).
+
+## [0.17.1] - 2021-04-29
+
+### Fixed
+
+- Fixed tests for build environments relying on `PYTHONPATH` (#318 by [@befeleme]).
+
+## [0.17.0] - 2021-04-02
+
+### Changed
+
+- Make `dotenv get <key>` only show the value, not `key=value` (#313 by [@bbc2]).
+
+### Added
+
+- Add `--override`/`--no-override` option to `dotenv run` (#312 by [@zueve] and [@bbc2]).
+
+## [0.16.0] - 2021-03-27
+
+### Changed
+
+- The default value of the `encoding` parameter for `load_dotenv` and `dotenv_values` is
+  now `"utf-8"` instead of `None` (#306 by [@bbc2]).
+- Fix resolution order in variable expansion with `override=False` (#287 by [@bbc2]).
+
+## [0.15.0] - 2020-10-28
+
+### Added
+
+- Add `--export` option to `set` to make it prepend the binding with `export` (#270 by
+  [@jadutter]).
+
+### Changed
+
+- Make `set` command create the `.env` file in the current directory if no `.env` file was
+  found (#270 by [@jadutter]).
+
+### Fixed
+
+- Fix potentially empty expanded value for duplicate key (#260 by [@bbc2]).
+- Fix import error on Python 3.5.0 and 3.5.1 (#267 by [@gongqingkui]).
+- Fix parsing of unquoted values containing several adjacent space or tab characters
+  (#277 by [@bbc2], review by [@x-yuri]).
+
+## [0.14.0] - 2020-07-03
+
+### Changed
+
+- Privilege definition in file over the environment in variable expansion (#256 by
+  [@elbehery95]).
+
+### Fixed
+
+- Improve error message for when file isn't found (#245 by [@snobu]).
+- Use HTTPS URL in package metadata (#251 by [@ekohl]).
+
+## [0.13.0] - 2020-04-16
+
+### Added
+
+- Add support for a Bash-like default value in variable expansion (#248 by [@bbc2]).
+
+## [0.12.0] - 2020-02-28
+
+### Changed
+
+- Use current working directory to find `.env` when bundled by PyInstaller (#213 by
+  [@gergelyk]).
+
+### Fixed
+
+- Fix escaping of quoted values written by `set_key` (#236 by [@bbc2]).
+- Fix `dotenv run` crashing on environment variables without values (#237 by [@yannham]).
+- Remove warning when last line is empty (#238 by [@bbc2]).
+
+## [0.11.0] - 2020-02-07
+
+### Added
+
+- Add `interpolate` argument to `load_dotenv` and `dotenv_values` to disable interpolation
+  (#232 by [@ulyssessouza]).
+
+### Changed
+
+- Use logging instead of warnings (#231 by [@bbc2]).
+
+### Fixed
+
+- Fix installation in non-UTF-8 environments (#225 by [@altendky]).
+- Fix PyPI classifiers (#228 by [@bbc2]).
+
+## [0.10.5] - 2020-01-19
+
+### Fixed
+
+- Fix handling of malformed lines and lines without a value (#222 by [@bbc2]):
+  - Don't print warning when key has no value.
+  - Reject more malformed lines (e.g. "A: B", "a='b',c").
+- Fix handling of lines with just a comment (#224 by [@bbc2]).
+
+## [0.10.4] - 2020-01-17
+
+### Added
+
+- Make typing optional (#179 by [@techalchemy]).
+- Print a warning on malformed line (#211 by [@bbc2]).
+- Support keys without a value (#220 by [@ulyssessouza]).
+
+## 0.10.3
+
+- Improve interactive mode detection ([@andrewsmith])([#183]).
+- Refactor parser to fix parsing inconsistencies ([@bbc2])([#170]).
+  - Interpret escapes as control characters only in double-quoted strings.
+  - Interpret `#` as start of comment only if preceded by whitespace.
+
+## 0.10.2
+
+- Add type hints and expose them to users ([@qnighy])([#172])
+- `load_dotenv` and `dotenv_values` now accept an `encoding` parameter, defaults to `None`
+  ([@theskumar])([@earlbread])([#161])
+- Fix `str`/`unicode` inconsistency in Python 2: values are always `str` now. ([@bbc2])([#121])
+- Fix Unicode error in Python 2, introduced in 0.10.0. ([@bbc2])([#176])
+
+## 0.10.1
+- Fix parsing of variable without a value ([@asyncee])([@bbc2])([#158])
+
+## 0.10.0
+
+- Add support for UTF-8 in unquoted values ([@bbc2])([#148])
+- Add support for trailing comments ([@bbc2])([#148])
+- Add backslashes support in values ([@bbc2])([#148])
+- Add support for newlines in values ([@bbc2])([#148])
+- Force environment variables to str with Python2 on Windows ([@greyli])
+- Drop Python 3.3 support ([@greyli])
+- Fix stderr/-out/-in redirection ([@venthur])
+
+
+## 0.9.0
+
+- Add `--version` parameter to cli ([@venthur])
+- Enable loading from current directory ([@cjauvin])
+- Add 'dotenv run' command for calling arbitrary shell script with .env ([@venthur])
+
+## 0.8.1
+
+- Add tests for docs ([@Flimm])
+- Make 'cli' support optional. Use `pip install python-dotenv[cli]`. ([@theskumar])
+
+## 0.8.0
+
+- `set_key` and `unset_key` only modify the affected part of the file instead of
+  parsing and re-writing it, so comments and the rest of the file are left
+  intact.
+- Add support for `export` prefix in the line.
+- Internal refactoring ([@theskumar])
+- Allow `load_dotenv` and `dotenv_values` to work with `StringIO()` ([@alanjds])([@theskumar])([#78])
+
+## 0.7.1
+
+- Remove hard dependency on IPython ([@theskumar])
+
+## 0.7.0
+
+- Add support to override system environment variables via .env.
+  ([@milonimrod](https://github.com/milonimrod))
+  ([\#63](https://github.com/theskumar/python-dotenv/issues/63))
+- Disable ".env not found" warning by default
+  ([@maxkoryukov](https://github.com/maxkoryukov))
+  ([\#57](https://github.com/theskumar/python-dotenv/issues/57))
+
+## 0.6.5
+
+- Add support for special characters `\`.
+  ([@pjona](https://github.com/pjona))
+  ([\#60](https://github.com/theskumar/python-dotenv/issues/60))
+
+## 0.6.4
+
+- Fix issue with single quotes ([@Flimm])
+  ([\#52](https://github.com/theskumar/python-dotenv/issues/52))
+
+## 0.6.3
+
+- Handle unicode exception in setup.py
+  ([\#46](https://github.com/theskumar/python-dotenv/issues/46))
+
+## 0.6.2
+
+- Fix `dotenv list` command ([@ticosax](https://github.com/ticosax))
+- Add IPython support
+  ([@tillahoffmann](https://github.com/tillahoffmann))
+
+## 0.6.0
+
+- Drop support for Python 2.6
+- Handle escaped characters and newlines in quoted values. (Thanks
+  [@iameugenejo](https://github.com/iameugenejo))
+- Remove any spaces around unquoted key/value. (Thanks
+  [@paulochf](https://github.com/paulochf))
+- Added POSIX variable expansion. (Thanks
+  [@hugochinchilla](https://github.com/hugochinchilla))
+
+## 0.5.1
+
+- Fix `find_dotenv` - it now starts the search from the file where this
+  function is called from.
+
+## 0.5.0
+
+- Add `find_dotenv` method that will try to find a `.env` file.
+  (Thanks [@isms](https://github.com/isms))
+
+## 0.4.0
+
+- cli: Added `-q/--quote` option to control the behaviour of quotes
+  around values in `.env`. (Thanks
+  [@hugochinchilla](https://github.com/hugochinchilla)).
+- Improved test coverage.
+
+[#78]: https://github.com/theskumar/python-dotenv/issues/78
+[#121]: https://github.com/theskumar/python-dotenv/issues/121
+[#148]: https://github.com/theskumar/python-dotenv/issues/148
+[#158]: https://github.com/theskumar/python-dotenv/issues/158
+[#170]: https://github.com/theskumar/python-dotenv/issues/170
+[#172]: https://github.com/theskumar/python-dotenv/issues/172
+[#176]: https://github.com/theskumar/python-dotenv/issues/176
+[#183]: https://github.com/theskumar/python-dotenv/issues/183
+[#359]: https://github.com/theskumar/python-dotenv/issues/359
+
+[@Flimm]: https://github.com/Flimm
+[@alanjds]: https://github.com/alanjds
+[@altendky]: https://github.com/altendky
+[@andrewsmith]: https://github.com/andrewsmith
+[@asyncee]: https://github.com/asyncee
+[@bbc2]: https://github.com/bbc2
+[@befeleme]: https://github.com/befeleme
+[@cjauvin]: https://github.com/cjauvin
+[@earlbread]: https://github.com/earlbread
+[@ekohl]: https://github.com/ekohl
+[@elbehery95]: https://github.com/elbehery95
+[@gergelyk]: https://github.com/gergelyk
+[@gongqingkui]: https://github.com/gongqingkui
+[@greyli]: https://github.com/greyli
+[@jadutter]: https://github.com/jadutter
+[@mgorny]: https://github.com/mgorny
+[@qnighy]: https://github.com/qnighy
+[@snobu]: https://github.com/snobu
+[@techalchemy]: https://github.com/techalchemy
+[@theskumar]: https://github.com/theskumar
+[@ulyssessouza]: https://github.com/ulyssessouza
+[@venthur]: https://github.com/venthur
+[@x-yuri]: https://github.com/x-yuri
+[@yannham]: https://github.com/yannham
+[@zueve]: https://github.com/zueve
+
+[Unreleased]: https://github.com/theskumar/python-dotenv/compare/v0.20.0...HEAD
+[0.20.0]: https://github.com/theskumar/python-dotenv/compare/v0.19.2...v0.20.0
+[0.19.2]: https://github.com/theskumar/python-dotenv/compare/v0.19.1...v0.19.2
+[0.19.1]: https://github.com/theskumar/python-dotenv/compare/v0.19.0...v0.19.1
+[0.19.0]: https://github.com/theskumar/python-dotenv/compare/v0.18.0...v0.19.0
+[0.18.0]: https://github.com/theskumar/python-dotenv/compare/v0.17.1...v0.18.0
+[0.17.1]: https://github.com/theskumar/python-dotenv/compare/v0.17.0...v0.17.1
+[0.17.0]: https://github.com/theskumar/python-dotenv/compare/v0.16.0...v0.17.0
+[0.16.0]: https://github.com/theskumar/python-dotenv/compare/v0.15.0...v0.16.0
+[0.15.0]: https://github.com/theskumar/python-dotenv/compare/v0.14.0...v0.15.0
+[0.14.0]: https://github.com/theskumar/python-dotenv/compare/v0.13.0...v0.14.0
+[0.13.0]: https://github.com/theskumar/python-dotenv/compare/v0.12.0...v0.13.0
+[0.12.0]: https://github.com/theskumar/python-dotenv/compare/v0.11.0...v0.12.0
+[0.11.0]: https://github.com/theskumar/python-dotenv/compare/v0.10.5...v0.11.0
+[0.10.5]: https://github.com/theskumar/python-dotenv/compare/v0.10.4...v0.10.5
+[0.10.4]: https://github.com/theskumar/python-dotenv/compare/v0.10.3...v0.10.4
+
+
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/RECORD
new file mode 100644
index 00000000..aac2c1ce
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/RECORD
@@ -0,0 +1,23 @@
+../../Scripts/dotenv.exe,sha256=7W38K2jr6dBO5wbTIh9H2nxwLFim-YCd6AANvQhaBgg,106369
+dotenv/__init__.py,sha256=bdAGaaBAOc_Hyqqy8UOVjedJDgUNqMTLlmqcOrMCdC8,1298
+dotenv/__pycache__/__init__.cpython-37.pyc,,
+dotenv/__pycache__/cli.cpython-37.pyc,,
+dotenv/__pycache__/ipython.cpython-37.pyc,, +dotenv/__pycache__/main.cpython-37.pyc,, +dotenv/__pycache__/parser.cpython-37.pyc,, +dotenv/__pycache__/variables.cpython-37.pyc,, +dotenv/__pycache__/version.cpython-37.pyc,, +dotenv/cli.py,sha256=MfmIEYbhDhANP7CzAkgDJUahjKcY5mOjCbDEnJNvqhI,4777 +dotenv/ipython.py,sha256=avI6aez_RxnBptYgchIquF2TSgKI-GOhY3ppiu3VuWE,1303 +dotenv/main.py,sha256=wBUr-lDnDo7fKAi8oHJLxRlna4wvoGTgJdJNTikrGw0,11692 +dotenv/parser.py,sha256=HMB0VVy_fejMuGZ_Fdigzln_dBE9nsAAaZ7olwoAZSw,5298 +dotenv/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26 +dotenv/variables.py,sha256=HtvYMOOyyogGGZv-YkGzeMOKtovvqqGzh_kIB9xuAto,2404 +dotenv/version.py,sha256=NGIecTe1EEM7UeBjKSJ4vCWuGDWF1ZX4PckW2Eguxps,23 +python_dotenv-0.20.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +python_dotenv-0.20.0.dist-info/LICENSE,sha256=0nIJqz0WJ4Ko-OOHK5s1PEngksmqRnpkUiiDQH2NEDA,4600 +python_dotenv-0.20.0.dist-info/METADATA,sha256=fPA8H1g_zWafVdlx7u-TCw3mFyBDJxZh6JmuOdJDxY0,19376 +python_dotenv-0.20.0.dist-info/RECORD,, +python_dotenv-0.20.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +python_dotenv-0.20.0.dist-info/entry_points.txt,sha256=Ta6e0xl3qUUz_ZZ1D1YAR6SKDHNGBPLcHqfzyXq4TVk,43 +python_dotenv-0.20.0.dist-info/top_level.txt,sha256=eyqUH4SHJNr6ahOYlxIunTr4XinE8Z5ajWLdrK3r0D8,7 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/WHEEL b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/entry_points.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/entry_points.txt new file mode 100644 index 00000000..deb9ff42 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +dotenv = dotenv.cli:cli + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/top_level.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/top_level.txt new file mode 100644 index 00000000..fe7c01aa --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/python_dotenv-0.20.0.dist-info/top_level.txt @@ -0,0 +1 @@ +dotenv diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/LICENSE b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/LICENSE new file mode 100644 index 00000000..51f34429 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the +licenses found in 
LICENSE.APACHE2 or LICENSE.MIT. Contributions to this software are
+made under the terms of *both* these licenses.
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/LICENSE.APACHE2 b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/LICENSE.APACHE2
new file mode 100644
index 00000000..d6456956
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/LICENSE.APACHE2
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/LICENSE.MIT b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/LICENSE.MIT new file mode 100644 index 00000000..b8bb9718 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/LICENSE.MIT @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/METADATA new file mode 100644 index 00000000..0e60193c --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/METADATA @@ -0,0 +1,103 @@ +Metadata-Version: 2.1 +Name: sniffio +Version: 1.2.0 +Summary: Sniff out which async library your code is running under +Home-page: https://github.com/python-trio/sniffio +Author: Nathaniel J. 
Smith +Author-email: njs@pobox.com +License: MIT -or- Apache License 2.0 +Keywords: async,trio,asyncio +Platform: UNKNOWN +Classifier: License :: OSI Approved :: MIT License +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Framework :: Trio +Classifier: Framework :: AsyncIO +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Intended Audience :: Developers +Classifier: Development Status :: 5 - Production/Stable +Requires-Python: >=3.5 +Requires-Dist: contextvars (>=2.1) ; python_version < '3.7' + +.. image:: https://img.shields.io/badge/chat-join%20now-blue.svg + :target: https://gitter.im/python-trio/general + :alt: Join chatroom + +.. image:: https://img.shields.io/badge/docs-read%20now-blue.svg + :target: https://sniffio.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +.. image:: https://img.shields.io/pypi/v/sniffio.svg + :target: https://pypi.org/project/sniffio + :alt: Latest PyPi version + +.. image:: https://img.shields.io/conda/vn/conda-forge/sniffio.svg + :target: https://anaconda.org/conda-forge/sniffio + :alt: Latest conda-forge version + +.. image:: https://travis-ci.org/python-trio/sniffio.svg?branch=master + :target: https://travis-ci.org/python-trio/sniffio + :alt: Automated test status + +.. image:: https://codecov.io/gh/python-trio/sniffio/branch/master/graph/badge.svg + :target: https://codecov.io/gh/python-trio/sniffio + :alt: Test coverage + +================================================================= +sniffio: Sniff out which async library your code is running under +================================================================= + +You're writing a library. You've decided to be ambitious, and support +multiple async I/O packages, like `Trio <https://trio.readthedocs.io>`__, and `asyncio +<https://docs.python.org/3/library/asyncio.html>`__, and ... You've +written a bunch of clever code to handle all the differences. But... +how do you know *which* piece of clever code to run? + +This is a tiny package whose only purpose is to let you detect which +async library your code is running under. + +* Documentation: https://sniffio.readthedocs.io + +* Bug tracker and source code: https://github.com/python-trio/sniffio + +* License: MIT or Apache License 2.0, your choice + +* Contributor guide: https://trio.readthedocs.io/en/latest/contributing.html + +* Code of conduct: Contributors are requested to follow our `code of + conduct <https://trio.readthedocs.io/en/latest/code-of-conduct.html>`_ + in all project spaces. + +This library is maintained by the Trio project, as a service to the +async Python community as a whole. + + +Quickstart +---------- + +.. code-block:: python3 + + from sniffio import current_async_library + import trio + import asyncio + + async def print_library(): + library = current_async_library() + print("This is:", library) + + # Prints "This is: trio" + trio.run(print_library) + + # Prints "This is: asyncio" + asyncio.run(print_library()) + +For more details, including how to add support to new async libraries, +`please peruse our fine manual <https://sniffio.readthedocs.io>`__.
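One detail the quickstart above does not show: outside of a running event loop, `current_async_library()` raises `AsyncLibraryNotFoundError` rather than returning a sentinel value. A minimal sketch of a boolean probe built on that behaviour (the helper name `in_async_context` is illustrative, not part of sniffio's API):

```python
from sniffio import AsyncLibraryNotFoundError, current_async_library


def in_async_context() -> bool:
    """Return True when called from a task of a recognized async library."""
    try:
        current_async_library()  # raises AsyncLibraryNotFoundError in sync context
        return True
    except AsyncLibraryNotFoundError:
        return False
```

A library can use such a probe to choose between blocking and non-blocking code paths before committing to an async API.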
+ + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/RECORD new file mode 100644 index 00000000..caf2430a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/RECORD @@ -0,0 +1,19 @@ +sniffio-1.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +sniffio-1.2.0.dist-info/LICENSE,sha256=ZSyHhIjRRWNh4Iw_hgf9e6WYkqFBA9Fczk_5PIW1zIs,185 +sniffio-1.2.0.dist-info/LICENSE.APACHE2,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 +sniffio-1.2.0.dist-info/LICENSE.MIT,sha256=Pm2uVV65J4f8gtHUg1Vnf0VMf2Wus40_nnK_mj2vA0s,1046 +sniffio-1.2.0.dist-info/METADATA,sha256=1feG2RpWgvrsxlrHGBdQWr2qz_J9T2dNk9gHwBniB4o,3597 +sniffio-1.2.0.dist-info/RECORD,, +sniffio-1.2.0.dist-info/WHEEL,sha256=EVRjI69F5qVjm_YgqcTXPnTAv3BfSUr0WVAHuSP3Xoo,92 +sniffio-1.2.0.dist-info/top_level.txt,sha256=v9UJXGs5CyddCVeAqXkQiWOrpp6Wtx6GeRrPt9-jjHg,8 +sniffio/__init__.py,sha256=OVpJIlvGte9qZSi0tbZNiIF6cxVlNIkQY09sNw80m6Y,292 +sniffio/__pycache__/__init__.cpython-37.pyc,, +sniffio/__pycache__/_impl.cpython-37.pyc,, +sniffio/__pycache__/_version.cpython-37.pyc,, +sniffio/_impl.py,sha256=gtPkeGhus65ffGiTtv1w3qaMrfJI04ubcVMrWMW2_5Q,2721 +sniffio/_tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sniffio/_tests/__pycache__/__init__.cpython-37.pyc,, +sniffio/_tests/__pycache__/test_sniffio.cpython-37.pyc,, +sniffio/_tests/test_sniffio.py,sha256=yTv4bRyHH0ay5RpqWtdbzHFD6BV02xlBax61en_-OR8,1648 +sniffio/_version.py,sha256=codhqWKYFbR_gKABvhb2_aWOKwuRo4YQlLcENJOvFkQ,89 +sniffio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/WHEEL b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/WHEEL new file mode 100644 index 00000000..83ff02e9 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/top_level.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/top_level.txt new file mode 100644 index 00000000..01c65024 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio-1.2.0.dist-info/top_level.txt @@ -0,0 +1 @@ +sniffio diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/__init__.py new file mode 100644 index 00000000..ddbd53a5 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/__init__.py @@ -0,0 +1,14 @@ +"""Top-level package for sniffio.""" + +__all__ = [ + "current_async_library", "AsyncLibraryNotFoundError", + "current_async_library_cvar" +] + +from ._version import __version__ + +from ._impl import ( + current_async_library, + AsyncLibraryNotFoundError, + current_async_library_cvar, +) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/_impl.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/_impl.py new file mode 100644 index 00000000..240d3a2e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/_impl.py @@ -0,0 +1,83 @@ +from 
contextvars import ContextVar +from typing import Optional +import sys + +current_async_library_cvar = ContextVar( + "current_async_library_cvar", default=None +) # type: ContextVar[Optional[str]] + + +class AsyncLibraryNotFoundError(RuntimeError): + pass + + +def current_async_library() -> str: + """Detect which async library is currently running. + + The following libraries are currently supported: + + ================ =========== ============================ + Library Requires Magic string + ================ =========== ============================ + **Trio** Trio v0.6+ ``"trio"`` + **Curio** - ``"curio"`` + **asyncio** ``"asyncio"`` + **Trio-asyncio** v0.8.2+ ``"trio"`` or ``"asyncio"``, + depending on current mode + ================ =========== ============================ + + Returns: + A string like ``"trio"``. + + Raises: + AsyncLibraryNotFoundError: if called from synchronous context, + or if the current async library was not recognized. + + Examples: + + .. code-block:: python3 + + from sniffio import current_async_library + + async def generic_sleep(seconds): + library = current_async_library() + if library == "trio": + import trio + await trio.sleep(seconds) + elif library == "asyncio": + import asyncio + await asyncio.sleep(seconds) + # ... and so on ... + else: + raise RuntimeError(f"Unsupported library {library!r}") + + """ + value = current_async_library_cvar.get() + if value is not None: + return value + + # Sniff for curio (for now) + if 'curio' in sys.modules: + from curio.meta import curio_running + if curio_running(): + return 'curio' + + # Need to sniff for asyncio + if "asyncio" in sys.modules: + import asyncio + try: + current_task = asyncio.current_task # type: ignore[attr-defined] + except AttributeError: + current_task = asyncio.Task.current_task # type: ignore[attr-defined] + try: + if current_task() is not None: + if (3, 7) <= sys.version_info: + # asyncio has contextvars support, and we're in a task, so + # we can safely cache the sniffed value + current_async_library_cvar.set("asyncio") + return "asyncio" + except RuntimeError: + pass + raise AsyncLibraryNotFoundError( + "unknown async library, or not in async context" + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/_tests/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/_tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/_tests/test_sniffio.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/_tests/test_sniffio.py new file mode 100644 index 00000000..a19d13c9 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/_tests/test_sniffio.py @@ -0,0 +1,67 @@ +import sys + +import pytest + +from .. 
import ( + current_async_library, AsyncLibraryNotFoundError, + current_async_library_cvar +) + + +def test_basics(): + with pytest.raises(AsyncLibraryNotFoundError): + current_async_library() + + token = current_async_library_cvar.set("generic-lib") + try: + assert current_async_library() == "generic-lib" + finally: + current_async_library_cvar.reset(token) + + with pytest.raises(AsyncLibraryNotFoundError): + current_async_library() + + +def test_asyncio(): + import asyncio + + with pytest.raises(AsyncLibraryNotFoundError): + current_async_library() + + ran = [] + + async def this_is_asyncio(): + assert current_async_library() == "asyncio" + # Call it a second time to exercise the caching logic + assert current_async_library() == "asyncio" + ran.append(True) + + loop = asyncio.get_event_loop() + loop.run_until_complete(this_is_asyncio()) + assert ran == [True] + loop.close() + + with pytest.raises(AsyncLibraryNotFoundError): + current_async_library() + + +@pytest.mark.skipif(sys.version_info < (3, 6), reason='Curio requires 3.6+') +def test_curio(): + import curio + + with pytest.raises(AsyncLibraryNotFoundError): + current_async_library() + + ran = [] + + async def this_is_curio(): + assert current_async_library() == "curio" + # Call it a second time to exercise the caching logic + assert current_async_library() == "curio" + ran.append(True) + + curio.run(this_is_curio) + assert ran == [True] + + with pytest.raises(AsyncLibraryNotFoundError): + current_async_library() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/_version.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/_version.py new file mode 100644 index 00000000..7c4b1198 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/_version.py @@ -0,0 +1,3 @@ +# This file is imported from __init__.py and exec'd from setup.py + +__version__ = "1.2.0" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/py.typed b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/sniffio/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/LICENSE.md b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/LICENSE.md new file mode 100644 index 00000000..d16a60ec --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/LICENSE.md @@ -0,0 +1,27 @@ +Copyright © 2018, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+ +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/METADATA new file mode 100644 index 00000000..acf7f65f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/METADATA @@ -0,0 +1,179 @@ +Metadata-Version: 2.1 +Name: starlette +Version: 0.19.1 +Summary: The little ASGI library that shines. +Home-page: https://github.com/encode/starlette +Author: Tom Christie +Author-email: tom@tomchristie.com +License: BSD +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Framework :: AnyIO +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +Requires-Dist: anyio (<5,>=3.4.0) +Requires-Dist: typing-extensions (>=3.10.0) ; python_version < "3.10" +Requires-Dist: contextlib2 (>=21.6.0) ; python_version < "3.7" +Provides-Extra: full +Requires-Dist: itsdangerous ; extra == 'full' +Requires-Dist: jinja2 ; extra == 'full' +Requires-Dist: python-multipart ; extra == 'full' +Requires-Dist: pyyaml ; extra == 'full' +Requires-Dist: requests ; extra == 'full' + +
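The `Provides-Extra: full` block above declares optional dependencies that only apply when the package is installed as `starlette[full]`. At runtime such extras are usually consumed with an import guard, so a missing extra fails with a clear message only when the feature is actually used. A minimal sketch of that pattern, assuming `jinja2` as the optional dependency (a simplified stand-in, not the actual `starlette.templating` code):

```python
# Guarded import: if the "full" extra is absent, jinja2 becomes None instead
# of the import crashing the whole package at import time.
try:
    import jinja2
except ImportError:  # pragma: no cover
    jinja2 = None


class TemplatesSketch:
    """Fails with an actionable error only when the optional feature is used."""

    def __init__(self, directory: str) -> None:
        assert jinja2 is not None, "jinja2 must be installed to use templates"
        self.env = jinja2.Environment(
            loader=jinja2.FileSystemLoader(directory), autoescape=True
        )
```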
+ [starlette logo]
+
+ ✨ The little ASGI framework that shines. ✨
+
+ [badges: Build Status, Package version]
+
+ +--- + +**Documentation**: [https://www.starlette.io/](https://www.starlette.io/) + +--- + +# Starlette + +Starlette is a lightweight [ASGI][asgi] framework/toolkit, +which is ideal for building async web services in Python. + +It is production-ready, and gives you the following: + +* A lightweight, low-complexity HTTP web framework. +* WebSocket support. +* In-process background tasks. +* Startup and shutdown events. +* Test client built on `requests`. +* CORS, GZip, Static Files, Streaming responses. +* Session and Cookie support. +* 100% test coverage. +* 100% type annotated codebase. +* Few hard dependencies. +* Compatible with `asyncio` and `trio` backends. +* Great overall performance [against independent benchmarks][techempower]. + +## Requirements + +Python 3.6+ + +## Installation + +```shell +$ pip3 install starlette +``` + +You'll also want to install an ASGI server, such as [uvicorn](http://www.uvicorn.org/), [daphne](https://github.com/django/daphne/), or [hypercorn](https://pgjones.gitlab.io/hypercorn/). + +```shell +$ pip3 install uvicorn +``` + +## Example + +**example.py**: + +```python +from starlette.applications import Starlette +from starlette.responses import JSONResponse +from starlette.routing import Route + + +async def homepage(request): + return JSONResponse({'hello': 'world'}) + +routes = [ + Route("/", endpoint=homepage) +] + +app = Starlette(debug=True, routes=routes) +``` + +Then run the application using Uvicorn: + +```shell +$ uvicorn example:app +``` + +For a more complete example, see [encode/starlette-example](https://github.com/encode/starlette-example). + +## Dependencies + +Starlette only requires `anyio`, and the following are optional: + +* [`requests`][requests] - Required if you want to use the `TestClient`. +* [`jinja2`][jinja2] - Required if you want to use `Jinja2Templates`. +* [`python-multipart`][python-multipart] - Required if you want to support form parsing, with `request.form()`. +* [`itsdangerous`][itsdangerous] - Required for `SessionMiddleware` support. +* [`pyyaml`][pyyaml] - Required for `SchemaGenerator` support. + +You can install all of these with `pip3 install starlette[full]`. + +## Framework or Toolkit + +Starlette is designed to be used either as a complete framework, or as +an ASGI toolkit. You can use any of its components independently. + +```python +from starlette.responses import PlainTextResponse + + +async def app(scope, receive, send): + assert scope['type'] == 'http' + response = PlainTextResponse('Hello, world!') + await response(scope, receive, send) +``` + +Run the `app` application in `example.py`: + +```shell +$ uvicorn example:app +INFO: Started server process [11509] +INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit) +``` + +Run uvicorn with `--reload` to enable auto-reloading on code changes. + +## Modularity + +The modularity that Starlette is designed around promotes building re-usable +components that can be shared between any ASGI framework. This should enable +an ecosystem of shared middleware and mountable applications. + +The clean API separation also means it's easier to understand each component +in isolation. + +--- +
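The Modularity section above is concrete in practice: because components only need to speak ASGI, middleware written against the raw `(scope, receive, send)` interface runs under Starlette or any other ASGI framework. A minimal sketch of such a shareable component (the `ServerHeaderMiddleware` name and the header it sets are illustrative, not part of Starlette):

```python
from starlette.types import ASGIApp, Message, Receive, Scope, Send


class ServerHeaderMiddleware:
    """Pure ASGI middleware: adds a Server header to every HTTP response."""

    def __init__(self, app: ASGIApp, server: str = "starlette") -> None:
        self.app = app
        self.server = server.encode("latin-1")

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        if scope["type"] != "http":
            # Pass lifespan/websocket traffic through untouched.
            await self.app(scope, receive, send)
            return

        async def send_wrapper(message: Message) -> None:
            if message["type"] == "http.response.start":
                headers = list(message.get("headers", []))
                headers.append((b"server", self.server))
                message["headers"] = headers
            await send(message)

        await self.app(scope, receive, send_wrapper)
```

It would be wired in through the constructor shown later in this diff, e.g. `Starlette(routes=routes, middleware=[Middleware(ServerHeaderMiddleware)])`.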

Starlette is BSD licensed code.
Designed & crafted with care.

— ⭐️ —

+ +[asgi]: https://asgi.readthedocs.io/en/latest/ +[requests]: http://docs.python-requests.org/en/master/ +[jinja2]: http://jinja.pocoo.org/ +[python-multipart]: https://andrew-d.github.io/python-multipart/ +[itsdangerous]: https://pythonhosted.org/itsdangerous/ +[sqlalchemy]: https://www.sqlalchemy.org +[pyyaml]: https://pyyaml.org/wiki/PyYAMLDocumentation +[techempower]: https://www.techempower.com/benchmarks/#hw=ph&test=fortune&l=zijzen-sf + + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/RECORD new file mode 100644 index 00000000..0b36f8d6 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/RECORD @@ -0,0 +1,73 @@ +starlette-0.19.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +starlette-0.19.1.dist-info/LICENSE.md,sha256=3LlWd6AiQCQxh-lk-UGEfRmxeCHPmeWvrmhPqzKMGb8,1518 +starlette-0.19.1.dist-info/METADATA,sha256=LPIAI5r0i7IIFTq8td5P9EN1RqMM7EvM5CUSwxvaMS0,5607 +starlette-0.19.1.dist-info/RECORD,, +starlette-0.19.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +starlette-0.19.1.dist-info/top_level.txt,sha256=eWSxNxwzxXl9k09QFS8z1V7D3gCryfl1rTObkPBMRJI,10 +starlette/__init__.py,sha256=ACwqBctK6p_9Oj2rXWiSmVOPJURrCCh7l_Ak1Mk2h-o,23 +starlette/__pycache__/__init__.cpython-37.pyc,, +starlette/__pycache__/_compat.cpython-37.pyc,, +starlette/__pycache__/_pep562.cpython-37.pyc,, +starlette/__pycache__/applications.cpython-37.pyc,, +starlette/__pycache__/authentication.cpython-37.pyc,, +starlette/__pycache__/background.cpython-37.pyc,, +starlette/__pycache__/concurrency.cpython-37.pyc,, +starlette/__pycache__/config.cpython-37.pyc,, +starlette/__pycache__/convertors.cpython-37.pyc,, +starlette/__pycache__/datastructures.cpython-37.pyc,, +starlette/__pycache__/endpoints.cpython-37.pyc,, +starlette/__pycache__/exceptions.cpython-37.pyc,, +starlette/__pycache__/formparsers.cpython-37.pyc,, +starlette/__pycache__/requests.cpython-37.pyc,, +starlette/__pycache__/responses.cpython-37.pyc,, +starlette/__pycache__/routing.cpython-37.pyc,, +starlette/__pycache__/schemas.cpython-37.pyc,, +starlette/__pycache__/staticfiles.cpython-37.pyc,, +starlette/__pycache__/status.cpython-37.pyc,, +starlette/__pycache__/templating.cpython-37.pyc,, +starlette/__pycache__/testclient.cpython-37.pyc,, +starlette/__pycache__/types.cpython-37.pyc,, +starlette/__pycache__/websockets.cpython-37.pyc,, +starlette/_compat.py,sha256=yqCApnDFpPoAiMrEF5CnOiTngAb6_pPTR2ykPtxWCb4,1149 +starlette/_pep562.py,sha256=etty8LQhAsUcbZ6r2ZlSpyUqG20_agArEz1gGiByZgY,2779 +starlette/applications.py,sha256=qYmvarqXgwLa_juii4pWoYLWi0kvwWVNzwfh7ds5ons,9925 +starlette/authentication.py,sha256=nP0IcnJNNIB04QOJ5K66TsaTIRhOD3OMzJWVgQGp2hQ,5090 +starlette/background.py,sha256=i_spMdhZWNA4Q5EMEYnTC4qVwlbH2A7LhkJr1EG0BKU,1237 +starlette/concurrency.py,sha256=IIxlJASchhv7T1ik_WQRuN-6Pd0r--_x4gqt9odO0Yc,1741 +starlette/config.py,sha256=wD1dp9XuNn8ypUrSwREOTsY1ZMV-OU5my9xczmRxltM,4488 +starlette/convertors.py,sha256=G5OFkuGVGkaoJDLrrrMjqbmFtsvybzJ0VpLsC-0xH5I,2166 +starlette/datastructures.py,sha256=G1Giw0Ozb_Yib7f3HimmuRtAN7T9UYN1H9mvjiWImTo,22178 +starlette/endpoints.py,sha256=tpOo3BfcMNQC-SoHNPS7BSWCG5u2NZZTtvAF3YWB5so,5123 +starlette/exceptions.py,sha256=wWC8ik8U0cucaOEkK6a9bxgEa_L0r3YFAEvjNdr3IPw,3909 +starlette/formparsers.py,sha256=JqrbOqbOhHvR4K-I1wKV2qUzbbFPSIm43dTVRDmo2UI,8747 
+starlette/middleware/__init__.py,sha256=tEegvh1lBhUfHaJ0p-T_9FU7PRT1WHoIc2vZX0pUlm8,546 +starlette/middleware/__pycache__/__init__.cpython-37.pyc,, +starlette/middleware/__pycache__/authentication.cpython-37.pyc,, +starlette/middleware/__pycache__/base.cpython-37.pyc,, +starlette/middleware/__pycache__/cors.cpython-37.pyc,, +starlette/middleware/__pycache__/errors.cpython-37.pyc,, +starlette/middleware/__pycache__/gzip.cpython-37.pyc,, +starlette/middleware/__pycache__/httpsredirect.cpython-37.pyc,, +starlette/middleware/__pycache__/sessions.cpython-37.pyc,, +starlette/middleware/__pycache__/trustedhost.cpython-37.pyc,, +starlette/middleware/__pycache__/wsgi.cpython-37.pyc,, +starlette/middleware/authentication.py,sha256=rP3t8t7HoO3xW8LH48Yo_GY8jzyx7WCrT88m_1O_kJ4,1787 +starlette/middleware/base.py,sha256=uG_c8TgZ23oSXQt1-DIHzonZLbzjeBUQc5CfL1DjtyM,2661 +starlette/middleware/cors.py,sha256=x_pPPhDxTKZq4Zue3dMjzvSI580e_QZG6xqP9OgJ4iM,7076 +starlette/middleware/errors.py,sha256=9BErqdMUSzRPF6aKIL3sK5EHQIVA31B5X6N6C9P76vc,7771 +starlette/middleware/gzip.py,sha256=GJlMYl-GHhkDIPIRchvbat3Fi-DRMITt8zHN6pq2OlQ,4087 +starlette/middleware/httpsredirect.py,sha256=SNTleaYALGoITV7xwbic4gB6VYdM8Ylea_ykciUz31g,848 +starlette/middleware/sessions.py,sha256=h0yeRBo5QZ2LV30p4jvgkbLYO3HNNkWNESeCncSY7L4,3421 +starlette/middleware/trustedhost.py,sha256=5oTnnGRXQC4KJTROs802G9tJzBij2zI9ze11aEdbins,2207 +starlette/middleware/wsgi.py,sha256=YbjlcETXYzaN5--Rk2uC2vEvxAiKwuvWQ7IuOFjU3H4,4906 +starlette/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +starlette/requests.py,sha256=A3ofuUDW_pdc_E5Ui0RC4m7QbTt86MlFLSBylrcG22Y,9578 +starlette/responses.py,sha256=DCESKs_eNeu_PvixQ42H6dhcQ6J-AfhsS44VwaC7wko,12392 +starlette/routing.py,sha256=lN4-tveWiCN4NW3F3kn6GTx29GEE35wPeYYZRPowb-k,30160 +starlette/schemas.py,sha256=K9bDGPCMN-ikPS9hTtl2Bp30LH-O0P_dSGOmS-oDNTA,4474 +starlette/staticfiles.py,sha256=vJM-osz1YCAtml40WIdq96pJv3TLpFL5Xa6-le4DLUA,8499 +starlette/status.py,sha256=qMYRRGo51Lgy_QVnCsG65wDl-yJcIHNusxTzR0cdDQQ,6223 +starlette/templating.py,sha256=dwKMdegSRx-r3MUnDWHiXmz2GI_3sC02EATBJinyqbk,3620 +starlette/testclient.py,sha256=FVT6gx5yJ6E4FgMbFO-7Gf3RZ-6Fu7zW1p9ZU18IyDQ,19673 +starlette/types.py,sha256=RbisZ8DEsquztH1HwK6_8Iy5ZvQwddYcDDFW32KEN3o,302 +starlette/websockets.py,sha256=dCDRvGsp62FXrxDv320P7fv5QggUK21kneokFq9juWQ,7317 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/WHEEL b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/top_level.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/top_level.txt new file mode 100644 index 00000000..b8a0f926 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette-0.19.1.dist-info/top_level.txt @@ -0,0 +1 @@ +starlette diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/__init__.py new file mode 100644 index 00000000..4c1ca3c8 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/__init__.py @@ 
-0,0 +1 @@ +__version__ = "0.19.1" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/_compat.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/_compat.py new file mode 100644 index 00000000..11656191 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/_compat.py @@ -0,0 +1,29 @@ +import hashlib + +# Compat wrapper to always include the `usedforsecurity=...` parameter, +# which is only added from Python 3.9 onwards. +# We use this flag to indicate that we use `md5` hashes only for non-security +# cases (our ETag checksums). +# If we don't indicate that we're using MD5 for non-security related reasons, +# then attempting to use this function will raise an error when used in +# environments which enable a strict "FIPS mode". +# +# See issue: https://github.com/encode/starlette/issues/1365 +try: + + # check if the Python version supports the parameter + # using usedforsecurity=False to avoid an exception on FIPS systems + # that reject usedforsecurity=True + hashlib.md5(b"data", usedforsecurity=False) # type: ignore[call-arg] + + def md5_hexdigest( + data: bytes, *, usedforsecurity: bool = True + ) -> str: # pragma: no cover + return hashlib.md5( # type: ignore[call-arg] + data, usedforsecurity=usedforsecurity + ).hexdigest() + +except TypeError: # pragma: no cover + + def md5_hexdigest(data: bytes, *, usedforsecurity: bool = True) -> str: + return hashlib.md5(data).hexdigest() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/_pep562.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/_pep562.py new file mode 100644 index 00000000..a4757eac --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/_pep562.py @@ -0,0 +1,62 @@ +# flake8: noqa +""" +Backport of PEP 562. +https://pypi.org/search/?q=pep562 +Licensed under MIT +Copyright (c) 2018 Isaac Muse +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, +and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +The above copyright notice and this permission notice shall be included in all copies or substantial portions +of the Software. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" +import sys +from typing import Any, Callable, List, Optional + + +class Pep562: + """ + Backport of PEP 562. + Wraps the module in a class that exposes the mechanics to override `__dir__` and `__getattr__`. + The given module will be searched for overrides of `__dir__` and `__getattr__`, which are used when needed.
+ """ + + def __init__(self, name: str) -> None: # pragma: no cover + """Acquire `__getattr__` and `__dir__`, but only replace module for versions less than Python 3.7.""" + + self._module = sys.modules[name] + self._get_attr = getattr(self._module, "__getattr__", None) + self._get_dir: Optional[Callable[..., List[str]]] = getattr( + self._module, "__dir__", None + ) + sys.modules[name] = self # type: ignore[assignment] + + def __dir__(self) -> List[str]: # pragma: no cover + """Return the overridden `dir` if one was provided, else apply `dir` to the module.""" + + return self._get_dir() if self._get_dir else dir(self._module) + + def __getattr__(self, name: str) -> Any: # pragma: no cover + """ + Attempt to retrieve the attribute from the module, and if missing, use the overridden function if present. + """ + + try: + return getattr(self._module, name) + except AttributeError: + if self._get_attr: + return self._get_attr(name) + raise + + +def pep562(module_name: str) -> None: # pragma: no cover + """Helper function to apply PEP 562.""" + + if sys.version_info < (3, 7): + Pep562(module_name) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/applications.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/applications.py new file mode 100644 index 00000000..8c515444 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/applications.py @@ -0,0 +1,283 @@ +import typing + +from starlette.datastructures import State, URLPath +from starlette.exceptions import ExceptionMiddleware +from starlette.middleware import Middleware +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.middleware.errors import ServerErrorMiddleware +from starlette.requests import Request +from starlette.responses import Response +from starlette.routing import BaseRoute, Router +from starlette.types import ASGIApp, Receive, Scope, Send + + +class Starlette: + """ + Creates an application instance. + + **Parameters:** + + * **debug** - Boolean indicating if debug tracebacks should be returned on errors. + * **routes** - A list of routes to serve incoming HTTP and WebSocket requests. + * **middleware** - A list of middleware to run for every request. A starlette + application will always automatically include two middleware classes. + `ServerErrorMiddleware` is added as the very outermost middleware, to handle + any uncaught errors occurring anywhere in the entire stack. + `ExceptionMiddleware` is added as the very innermost middleware, to deal + with handled exception cases occurring in the routing or endpoints. + * **exception_handlers** - A mapping of either integer status codes, + or exception class types onto callables which handle the exceptions. + Exception handler callables should be of the form + `handler(request, exc) -> response` and may be either standard functions, or + async functions. + * **on_startup** - A list of callables to run on application startup. + Startup handler callables do not take any arguments, and may be either + standard functions, or async functions. + * **on_shutdown** - A list of callables to run on application shutdown. + Shutdown handler callables do not take any arguments, and may be either + standard functions, or async functions.
+ """ + + def __init__( + self, + debug: bool = False, + routes: typing.Optional[typing.Sequence[BaseRoute]] = None, + middleware: typing.Optional[typing.Sequence[Middleware]] = None, + exception_handlers: typing.Optional[ + typing.Mapping[ + typing.Any, + typing.Callable[ + [Request, Exception], + typing.Union[Response, typing.Awaitable[Response]], + ], + ] + ] = None, + on_startup: typing.Optional[typing.Sequence[typing.Callable]] = None, + on_shutdown: typing.Optional[typing.Sequence[typing.Callable]] = None, + lifespan: typing.Optional[ + typing.Callable[["Starlette"], typing.AsyncContextManager] + ] = None, + ) -> None: + # The lifespan context function is a newer style that replaces + # on_startup / on_shutdown handlers. Use one or the other, not both. + assert lifespan is None or ( + on_startup is None and on_shutdown is None + ), "Use either 'lifespan' or 'on_startup'/'on_shutdown', not both." + + self._debug = debug + self.state = State() + self.router = Router( + routes, on_startup=on_startup, on_shutdown=on_shutdown, lifespan=lifespan + ) + self.exception_handlers = ( + {} if exception_handlers is None else dict(exception_handlers) + ) + self.user_middleware = [] if middleware is None else list(middleware) + self.middleware_stack = self.build_middleware_stack() + + def build_middleware_stack(self) -> ASGIApp: + debug = self.debug + error_handler = None + exception_handlers: typing.Dict[ + typing.Any, typing.Callable[[Request, Exception], Response] + ] = {} + + for key, value in self.exception_handlers.items(): + if key in (500, Exception): + error_handler = value + else: + exception_handlers[key] = value + + middleware = ( + [Middleware(ServerErrorMiddleware, handler=error_handler, debug=debug)] + + self.user_middleware + + [ + Middleware( + ExceptionMiddleware, handlers=exception_handlers, debug=debug + ) + ] + ) + + app = self.router + for cls, options in reversed(middleware): + app = cls(app=app, **options) + return app + + @property + def routes(self) -> typing.List[BaseRoute]: + return self.router.routes + + @property + def debug(self) -> bool: + return self._debug + + @debug.setter + def debug(self, value: bool) -> None: + self._debug = value + self.middleware_stack = self.build_middleware_stack() + + def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath: + return self.router.url_path_for(name, **path_params) + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + scope["app"] = self + await self.middleware_stack(scope, receive, send) + + # The following usages are now discouraged in favour of configuration + # during Starlette.__init__(...) + def on_event(self, event_type: str) -> typing.Callable: # pragma: nocover + return self.router.on_event(event_type) + + def mount( + self, path: str, app: ASGIApp, name: typing.Optional[str] = None + ) -> None: # pragma: nocover + """ + We no longer document this API, and its usage is discouraged. + Instead you should use the following approach: + + routes = [ + Mount(path, ...), + ... + ] + + app = Starlette(routes=routes) + """ + + self.router.mount(path, app=app, name=name) + + def host( + self, host: str, app: ASGIApp, name: typing.Optional[str] = None + ) -> None: # pragma: no cover + """ + We no longer document this API, and its usage is discouraged. + Instead you should use the following approach: + + routes = [ + Host(path, ...), + ... 
+ ] + + app = Starlette(routes=routes) + """ + + self.router.host(host, app=app, name=name) + + def add_middleware( + self, middleware_class: type, **options: typing.Any + ) -> None: # pragma: no cover + self.user_middleware.insert(0, Middleware(middleware_class, **options)) + self.middleware_stack = self.build_middleware_stack() + + def add_exception_handler( + self, + exc_class_or_status_code: typing.Union[int, typing.Type[Exception]], + handler: typing.Callable, + ) -> None: # pragma: no cover + self.exception_handlers[exc_class_or_status_code] = handler + self.middleware_stack = self.build_middleware_stack() + + def add_event_handler( + self, event_type: str, func: typing.Callable + ) -> None: # pragma: no cover + self.router.add_event_handler(event_type, func) + + def add_route( + self, + path: str, + route: typing.Callable, + methods: typing.Optional[typing.List[str]] = None, + name: typing.Optional[str] = None, + include_in_schema: bool = True, + ) -> None: # pragma: no cover + self.router.add_route( + path, route, methods=methods, name=name, include_in_schema=include_in_schema + ) + + def add_websocket_route( + self, path: str, route: typing.Callable, name: typing.Optional[str] = None + ) -> None: # pragma: no cover + self.router.add_websocket_route(path, route, name=name) + + def exception_handler( + self, exc_class_or_status_code: typing.Union[int, typing.Type[Exception]] + ) -> typing.Callable: # pragma: nocover + def decorator(func: typing.Callable) -> typing.Callable: + self.add_exception_handler(exc_class_or_status_code, func) + return func + + return decorator + + def route( + self, + path: str, + methods: typing.Optional[typing.List[str]] = None, + name: typing.Optional[str] = None, + include_in_schema: bool = True, + ) -> typing.Callable: # pragma: nocover + """ + We no longer document this decorator style API, and its usage is discouraged. + Instead you should use the following approach: + + routes = [ + Route(path, endpoint=..., ...), + ... + ] + + app = Starlette(routes=routes) + """ + + def decorator(func: typing.Callable) -> typing.Callable: + self.router.add_route( + path, + func, + methods=methods, + name=name, + include_in_schema=include_in_schema, + ) + return func + + return decorator + + def websocket_route( + self, path: str, name: typing.Optional[str] = None + ) -> typing.Callable: # pragma: nocover + """ + We no longer document this decorator style API, and its usage is discouraged. + Instead you should use the following approach: + + routes = [ + WebSocketRoute(path, endpoint=..., ...), + ... + ] + + app = Starlette(routes=routes) + """ + + def decorator(func: typing.Callable) -> typing.Callable: + self.router.add_websocket_route(path, func, name=name) + return func + + return decorator + + def middleware(self, middleware_type: str) -> typing.Callable: # pragma: nocover + """ + We no longer document this decorator style API, and its usage is discouraged. + Instead you should use the following approach: + + middleware = [ + Middleware(...), + ... + ] + + app = Starlette(middleware=middleware) + """ + + assert ( + middleware_type == "http" + ), 'Currently only middleware("http") is supported.' 
+ + def decorator(func: typing.Callable) -> typing.Callable: + self.add_middleware(BaseHTTPMiddleware, dispatch=func) + return func + + return decorator diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/authentication.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/authentication.py new file mode 100644 index 00000000..17f4a5ea --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/authentication.py @@ -0,0 +1,151 @@ +import asyncio +import functools +import inspect +import typing +from urllib.parse import urlencode + +from starlette.exceptions import HTTPException +from starlette.requests import HTTPConnection, Request +from starlette.responses import RedirectResponse, Response +from starlette.websockets import WebSocket + + +def has_required_scope(conn: HTTPConnection, scopes: typing.Sequence[str]) -> bool: + for scope in scopes: + if scope not in conn.auth.scopes: + return False + return True + + +def requires( + scopes: typing.Union[str, typing.Sequence[str]], + status_code: int = 403, + redirect: typing.Optional[str] = None, +) -> typing.Callable: + scopes_list = [scopes] if isinstance(scopes, str) else list(scopes) + + def decorator(func: typing.Callable) -> typing.Callable: + sig = inspect.signature(func) + for idx, parameter in enumerate(sig.parameters.values()): + if parameter.name == "request" or parameter.name == "websocket": + type_ = parameter.name + break + else: + raise Exception( + f'No "request" or "websocket" argument on function "{func}"' + ) + + if type_ == "websocket": + # Handle websocket functions. (Always async) + @functools.wraps(func) + async def websocket_wrapper( + *args: typing.Any, **kwargs: typing.Any + ) -> None: + websocket = kwargs.get( + "websocket", args[idx] if idx < len(args) else None + ) + assert isinstance(websocket, WebSocket) + + if not has_required_scope(websocket, scopes_list): + await websocket.close() + else: + await func(*args, **kwargs) + + return websocket_wrapper + + elif asyncio.iscoroutinefunction(func): + # Handle async request/response functions. + @functools.wraps(func) + async def async_wrapper( + *args: typing.Any, **kwargs: typing.Any + ) -> Response: + request = kwargs.get("request", args[idx] if idx < len(args) else None) + assert isinstance(request, Request) + + if not has_required_scope(request, scopes_list): + if redirect is not None: + orig_request_qparam = urlencode({"next": str(request.url)}) + next_url = "{redirect_path}?{orig_request}".format( + redirect_path=request.url_for(redirect), + orig_request=orig_request_qparam, + ) + return RedirectResponse(url=next_url, status_code=303) + raise HTTPException(status_code=status_code) + return await func(*args, **kwargs) + + return async_wrapper + + else: + # Handle sync request/response functions. 
+ @functools.wraps(func) + def sync_wrapper(*args: typing.Any, **kwargs: typing.Any) -> Response: + request = kwargs.get("request", args[idx] if idx < len(args) else None) + assert isinstance(request, Request) + + if not has_required_scope(request, scopes_list): + if redirect is not None: + orig_request_qparam = urlencode({"next": str(request.url)}) + next_url = "{redirect_path}?{orig_request}".format( + redirect_path=request.url_for(redirect), + orig_request=orig_request_qparam, + ) + return RedirectResponse(url=next_url, status_code=303) + raise HTTPException(status_code=status_code) + return func(*args, **kwargs) + + return sync_wrapper + + return decorator + + +class AuthenticationError(Exception): + pass + + +class AuthenticationBackend: + async def authenticate( + self, conn: HTTPConnection + ) -> typing.Optional[typing.Tuple["AuthCredentials", "BaseUser"]]: + raise NotImplementedError() # pragma: no cover + + +class AuthCredentials: + def __init__(self, scopes: typing.Optional[typing.Sequence[str]] = None): + self.scopes = [] if scopes is None else list(scopes) + + +class BaseUser: + @property + def is_authenticated(self) -> bool: + raise NotImplementedError() # pragma: no cover + + @property + def display_name(self) -> str: + raise NotImplementedError() # pragma: no cover + + @property + def identity(self) -> str: + raise NotImplementedError() # pragma: no cover + + +class SimpleUser(BaseUser): + def __init__(self, username: str) -> None: + self.username = username + + @property + def is_authenticated(self) -> bool: + return True + + @property + def display_name(self) -> str: + return self.username + + +class UnauthenticatedUser(BaseUser): + @property + def is_authenticated(self) -> bool: + return False + + @property + def display_name(self) -> str: + return "" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/background.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/background.py new file mode 100644 index 00000000..145324e3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/background.py @@ -0,0 +1,43 @@ +import asyncio +import sys +import typing + +if sys.version_info >= (3, 10): # pragma: no cover + from typing import ParamSpec +else: # pragma: no cover + from typing_extensions import ParamSpec + +from starlette.concurrency import run_in_threadpool + +P = ParamSpec("P") + + +class BackgroundTask: + def __init__( + self, func: typing.Callable[P, typing.Any], *args: P.args, **kwargs: P.kwargs + ) -> None: + self.func = func + self.args = args + self.kwargs = kwargs + self.is_async = asyncio.iscoroutinefunction(func) + + async def __call__(self) -> None: + if self.is_async: + await self.func(*self.args, **self.kwargs) + else: + await run_in_threadpool(self.func, *self.args, **self.kwargs) + + +class BackgroundTasks(BackgroundTask): + def __init__(self, tasks: typing.Optional[typing.Sequence[BackgroundTask]] = None): + self.tasks = list(tasks) if tasks else [] + + def add_task( + self, func: typing.Callable[P, typing.Any], *args: P.args, **kwargs: P.kwargs + ) -> None: + task = BackgroundTask(func, *args, **kwargs) + self.tasks.append(task) + + async def __call__(self) -> None: + for task in self.tasks: + await task() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/concurrency.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/concurrency.py new file mode 100644 index 00000000..5c76cb3d --- /dev/null +++ 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/concurrency.py @@ -0,0 +1,65 @@ +import functools +import sys +import typing +import warnings + +import anyio + +if sys.version_info >= (3, 10): # pragma: no cover + from typing import ParamSpec +else: # pragma: no cover + from typing_extensions import ParamSpec + + +T = typing.TypeVar("T") +P = ParamSpec("P") + + +async def run_until_first_complete(*args: typing.Tuple[typing.Callable, dict]) -> None: + warnings.warn( + "run_until_first_complete is deprecated " + "and will be removed in a future version.", + DeprecationWarning, + ) + + async with anyio.create_task_group() as task_group: + + async def run(func: typing.Callable[[], typing.Coroutine]) -> None: + await func() + task_group.cancel_scope.cancel() + + for func, kwargs in args: + task_group.start_soon(run, functools.partial(func, **kwargs)) + + +async def run_in_threadpool( + func: typing.Callable[P, T], *args: P.args, **kwargs: P.kwargs +) -> T: + if kwargs: # pragma: no cover + # run_sync doesn't accept 'kwargs', so bind them in here + func = functools.partial(func, **kwargs) + return await anyio.to_thread.run_sync(func, *args) + + +class _StopIteration(Exception): + pass + + +def _next(iterator: typing.Iterator[T]) -> T: + # We can't raise `StopIteration` from within the threadpool iterator + # and catch it outside that context, so we coerce them into a different + # exception type. + try: + return next(iterator) + except StopIteration: + raise _StopIteration + + +async def iterate_in_threadpool( + iterator: typing.Iterator[T], +) -> typing.AsyncIterator[T]: + while True: + try: + yield await anyio.to_thread.run_sync(_next, iterator) + except _StopIteration: + break diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/config.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/config.py new file mode 100644 index 00000000..e9e809c7 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/config.py @@ -0,0 +1,144 @@ +import os +import typing +from collections.abc import MutableMapping +from pathlib import Path + + +class undefined: + pass + + +class EnvironError(Exception): + pass + + +class Environ(MutableMapping): + def __init__(self, environ: typing.MutableMapping = os.environ): + self._environ = environ + self._has_been_read: typing.Set[typing.Any] = set() + + def __getitem__(self, key: typing.Any) -> typing.Any: + self._has_been_read.add(key) + return self._environ.__getitem__(key) + + def __setitem__(self, key: typing.Any, value: typing.Any) -> None: + if key in self._has_been_read: + raise EnvironError( + f"Attempting to set environ['{key}'], but the value has already been " + "read." + ) + self._environ.__setitem__(key, value) + + def __delitem__(self, key: typing.Any) -> None: + if key in self._has_been_read: + raise EnvironError( + f"Attempting to delete environ['{key}'], but the value has already " + "been read." 
+ ) + self._environ.__delitem__(key) + + def __iter__(self) -> typing.Iterator: + return iter(self._environ) + + def __len__(self) -> int: + return len(self._environ) + + +environ = Environ() + +T = typing.TypeVar("T") + + +class Config: + def __init__( + self, + env_file: typing.Optional[typing.Union[str, Path]] = None, + environ: typing.Mapping[str, str] = environ, + ) -> None: + self.environ = environ + self.file_values: typing.Dict[str, str] = {} + if env_file is not None and os.path.isfile(env_file): + self.file_values = self._read_file(env_file) + + @typing.overload + def __call__( + self, key: str, cast: typing.Type[T], default: T = ... + ) -> T: # pragma: no cover + ... + + @typing.overload + def __call__( + self, key: str, cast: typing.Type[str] = ..., default: str = ... + ) -> str: # pragma: no cover + ... + + @typing.overload + def __call__( + self, + key: str, + cast: typing.Callable[[typing.Any], T] = ..., + default: typing.Any = ..., + ) -> T: # pragma: no cover + ... + + @typing.overload + def __call__( + self, key: str, cast: typing.Type[str] = ..., default: T = ... + ) -> typing.Union[T, str]: # pragma: no cover + ... + + def __call__( + self, + key: str, + cast: typing.Optional[typing.Callable] = None, + default: typing.Any = undefined, + ) -> typing.Any: + return self.get(key, cast, default) + + def get( + self, + key: str, + cast: typing.Optional[typing.Callable] = None, + default: typing.Any = undefined, + ) -> typing.Any: + if key in self.environ: + value = self.environ[key] + return self._perform_cast(key, value, cast) + if key in self.file_values: + value = self.file_values[key] + return self._perform_cast(key, value, cast) + if default is not undefined: + return self._perform_cast(key, default, cast) + raise KeyError(f"Config '{key}' is missing, and has no default.") + + def _read_file(self, file_name: typing.Union[str, Path]) -> typing.Dict[str, str]: + file_values: typing.Dict[str, str] = {} + with open(file_name) as input_file: + for line in input_file.readlines(): + line = line.strip() + if "=" in line and not line.startswith("#"): + key, value = line.split("=", 1) + key = key.strip() + value = value.strip().strip("\"'") + file_values[key] = value + return file_values + + def _perform_cast( + self, key: str, value: typing.Any, cast: typing.Optional[typing.Callable] = None + ) -> typing.Any: + if cast is None or value is None: + return value + elif cast is bool and isinstance(value, str): + mapping = {"true": True, "1": True, "false": False, "0": False} + value = value.lower() + if value not in mapping: + raise ValueError( + f"Config '{key}' has value '{value}'. Not a valid bool." + ) + return mapping[value] + try: + return cast(value) + except (TypeError, ValueError): + raise ValueError( + f"Config '{key}' has value '{value}'. Not a valid {cast.__name__}." 
+ ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/convertors.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/convertors.py new file mode 100644 index 00000000..e3c03d07 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/convertors.py @@ -0,0 +1,87 @@ +import math +import typing +import uuid + +T = typing.TypeVar("T") + + +class Convertor(typing.Generic[T]): + regex: typing.ClassVar[str] = "" + + def convert(self, value: str) -> T: + raise NotImplementedError() # pragma: no cover + + def to_string(self, value: T) -> str: + raise NotImplementedError() # pragma: no cover + + +class StringConvertor(Convertor): + regex = "[^/]+" + + def convert(self, value: str) -> str: + return value + + def to_string(self, value: str) -> str: + value = str(value) + assert "/" not in value, "May not contain path separators" + assert value, "Must not be empty" + return value + + +class PathConvertor(Convertor): + regex = ".*" + + def convert(self, value: str) -> str: + return str(value) + + def to_string(self, value: str) -> str: + return str(value) + + +class IntegerConvertor(Convertor): + regex = "[0-9]+" + + def convert(self, value: str) -> int: + return int(value) + + def to_string(self, value: int) -> str: + value = int(value) + assert value >= 0, "Negative integers are not supported" + return str(value) + + +class FloatConvertor(Convertor): + regex = "[0-9]+(.[0-9]+)?" + + def convert(self, value: str) -> float: + return float(value) + + def to_string(self, value: float) -> str: + value = float(value) + assert value >= 0.0, "Negative floats are not supported" + assert not math.isnan(value), "NaN values are not supported" + assert not math.isinf(value), "Infinite values are not supported" + return ("%0.20f" % value).rstrip("0").rstrip(".") + + +class UUIDConvertor(Convertor): + regex = "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" + + def convert(self, value: str) -> uuid.UUID: + return uuid.UUID(value) + + def to_string(self, value: uuid.UUID) -> str: + return str(value) + + +CONVERTOR_TYPES = { + "str": StringConvertor(), + "path": PathConvertor(), + "int": IntegerConvertor(), + "float": FloatConvertor(), + "uuid": UUIDConvertor(), +} + + +def register_url_convertor(key: str, convertor: Convertor) -> None: + CONVERTOR_TYPES[key] = convertor diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/datastructures.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/datastructures.py new file mode 100644 index 00000000..59863282 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/datastructures.py @@ -0,0 +1,696 @@ +import tempfile +import typing +from collections.abc import Sequence +from shlex import shlex +from urllib.parse import SplitResult, parse_qsl, urlencode, urlsplit + +from starlette.concurrency import run_in_threadpool +from starlette.types import Scope + + +class Address(typing.NamedTuple): + host: str + port: int + + +class URL: + def __init__( + self, + url: str = "", + scope: typing.Optional[Scope] = None, + **components: typing.Any, + ) -> None: + if scope is not None: + assert not url, 'Cannot set both "url" and "scope".' + assert not components, 'Cannot set both "scope" and "**components".' 
+ scheme = scope.get("scheme", "http") + server = scope.get("server", None) + path = scope.get("root_path", "") + scope["path"] + query_string = scope.get("query_string", b"") + + host_header = None + for key, value in scope["headers"]: + if key == b"host": + host_header = value.decode("latin-1") + break + + if host_header is not None: + url = f"{scheme}://{host_header}{path}" + elif server is None: + url = path + else: + host, port = server + default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme] + if port == default_port: + url = f"{scheme}://{host}{path}" + else: + url = f"{scheme}://{host}:{port}{path}" + + if query_string: + url += "?" + query_string.decode() + elif components: + assert not url, 'Cannot set both "url" and "**components".' + url = URL("").replace(**components).components.geturl() + + self._url = url + + @property + def components(self) -> SplitResult: + if not hasattr(self, "_components"): + self._components = urlsplit(self._url) + return self._components + + @property + def scheme(self) -> str: + return self.components.scheme + + @property + def netloc(self) -> str: + return self.components.netloc + + @property + def path(self) -> str: + return self.components.path + + @property + def query(self) -> str: + return self.components.query + + @property + def fragment(self) -> str: + return self.components.fragment + + @property + def username(self) -> typing.Union[None, str]: + return self.components.username + + @property + def password(self) -> typing.Union[None, str]: + return self.components.password + + @property + def hostname(self) -> typing.Union[None, str]: + return self.components.hostname + + @property + def port(self) -> typing.Optional[int]: + return self.components.port + + @property + def is_secure(self) -> bool: + return self.scheme in ("https", "wss") + + def replace(self, **kwargs: typing.Any) -> "URL": + if ( + "username" in kwargs + or "password" in kwargs + or "hostname" in kwargs + or "port" in kwargs + ): + hostname = kwargs.pop("hostname", self.hostname) + port = kwargs.pop("port", self.port) + username = kwargs.pop("username", self.username) + password = kwargs.pop("password", self.password) + + netloc = hostname + if port is not None: + netloc += f":{port}" + if username is not None: + userpass = username + if password is not None: + userpass += f":{password}" + netloc = f"{userpass}@{netloc}" + + kwargs["netloc"] = netloc + + components = self.components._replace(**kwargs) + return self.__class__(components.geturl()) + + def include_query_params(self, **kwargs: typing.Any) -> "URL": + params = MultiDict(parse_qsl(self.query, keep_blank_values=True)) + params.update({str(key): str(value) for key, value in kwargs.items()}) + query = urlencode(params.multi_items()) + return self.replace(query=query) + + def replace_query_params(self, **kwargs: typing.Any) -> "URL": + query = urlencode([(str(key), str(value)) for key, value in kwargs.items()]) + return self.replace(query=query) + + def remove_query_params( + self, keys: typing.Union[str, typing.Sequence[str]] + ) -> "URL": + if isinstance(keys, str): + keys = [keys] + params = MultiDict(parse_qsl(self.query, keep_blank_values=True)) + for key in keys: + params.pop(key, None) + query = urlencode(params.multi_items()) + return self.replace(query=query) + + def __eq__(self, other: typing.Any) -> bool: + return str(self) == str(other) + + def __str__(self) -> str: + return self._url + + def __repr__(self) -> str: + url = str(self) + if self.password: + url = 
str(self.replace(password="********")) + return f"{self.__class__.__name__}({repr(url)})" + + +class URLPath(str): + """ + A URL path string that may also hold an associated protocol and/or host. + Used by the routing to return `url_path_for` matches. + """ + + def __new__(cls, path: str, protocol: str = "", host: str = "") -> "URLPath": + assert protocol in ("http", "websocket", "") + return str.__new__(cls, path) + + def __init__(self, path: str, protocol: str = "", host: str = "") -> None: + self.protocol = protocol + self.host = host + + def make_absolute_url(self, base_url: typing.Union[str, URL]) -> str: + if isinstance(base_url, str): + base_url = URL(base_url) + if self.protocol: + scheme = { + "http": {True: "https", False: "http"}, + "websocket": {True: "wss", False: "ws"}, + }[self.protocol][base_url.is_secure] + else: + scheme = base_url.scheme + + netloc = self.host or base_url.netloc + path = base_url.path.rstrip("/") + str(self) + return str(URL(scheme=scheme, netloc=netloc, path=path)) + + +class Secret: + """ + Holds a string value that should not be revealed in tracebacks etc. + You should cast the value to `str` at the point it is required. + """ + + def __init__(self, value: str): + self._value = value + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + return f"{class_name}('**********')" + + def __str__(self) -> str: + return self._value + + +class CommaSeparatedStrings(Sequence): + def __init__(self, value: typing.Union[str, typing.Sequence[str]]): + if isinstance(value, str): + splitter = shlex(value, posix=True) + splitter.whitespace = "," + splitter.whitespace_split = True + self._items = [item.strip() for item in splitter] + else: + self._items = list(value) + + def __len__(self) -> int: + return len(self._items) + + def __getitem__(self, index: typing.Union[int, slice]) -> typing.Any: + return self._items[index] + + def __iter__(self) -> typing.Iterator[str]: + return iter(self._items) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + items = [item for item in self] + return f"{class_name}({items!r})" + + def __str__(self) -> str: + return ", ".join(repr(item) for item in self) + + +class ImmutableMultiDict(typing.Mapping): + def __init__( + self, + *args: typing.Union[ + "ImmutableMultiDict", + typing.Mapping, + typing.List[typing.Tuple[typing.Any, typing.Any]], + ], + **kwargs: typing.Any, + ) -> None: + assert len(args) < 2, "Too many arguments." 
+ + value = args[0] if args else [] + if kwargs: + value = ( + ImmutableMultiDict(value).multi_items() + + ImmutableMultiDict(kwargs).multi_items() + ) + + if not value: + _items: typing.List[typing.Tuple[typing.Any, typing.Any]] = [] + elif hasattr(value, "multi_items"): + value = typing.cast(ImmutableMultiDict, value) + _items = list(value.multi_items()) + elif hasattr(value, "items"): + value = typing.cast(typing.Mapping, value) + _items = list(value.items()) + else: + value = typing.cast( + typing.List[typing.Tuple[typing.Any, typing.Any]], value + ) + _items = list(value) + + self._dict = {k: v for k, v in _items} + self._list = _items + + def getlist(self, key: typing.Any) -> typing.List[typing.Any]: + return [item_value for item_key, item_value in self._list if item_key == key] + + def keys(self) -> typing.KeysView: + return self._dict.keys() + + def values(self) -> typing.ValuesView: + return self._dict.values() + + def items(self) -> typing.ItemsView: + return self._dict.items() + + def multi_items(self) -> typing.List[typing.Tuple[str, str]]: + return list(self._list) + + def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: + if key in self._dict: + return self._dict[key] + return default + + def __getitem__(self, key: typing.Any) -> str: + return self._dict[key] + + def __contains__(self, key: typing.Any) -> bool: + return key in self._dict + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._dict) + + def __eq__(self, other: typing.Any) -> bool: + if not isinstance(other, self.__class__): + return False + return sorted(self._list) == sorted(other._list) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + items = self.multi_items() + return f"{class_name}({items!r})" + + +class MultiDict(ImmutableMultiDict): + def __setitem__(self, key: typing.Any, value: typing.Any) -> None: + self.setlist(key, [value]) + + def __delitem__(self, key: typing.Any) -> None: + self._list = [(k, v) for k, v in self._list if k != key] + del self._dict[key] + + def pop(self, key: typing.Any, default: typing.Any = None) -> typing.Any: + self._list = [(k, v) for k, v in self._list if k != key] + return self._dict.pop(key, default) + + def popitem(self) -> typing.Tuple: + key, value = self._dict.popitem() + self._list = [(k, v) for k, v in self._list if k != key] + return key, value + + def poplist(self, key: typing.Any) -> typing.List: + values = [v for k, v in self._list if k == key] + self.pop(key) + return values + + def clear(self) -> None: + self._dict.clear() + self._list.clear() + + def setdefault(self, key: typing.Any, default: typing.Any = None) -> typing.Any: + if key not in self: + self._dict[key] = default + self._list.append((key, default)) + + return self[key] + + def setlist(self, key: typing.Any, values: typing.List) -> None: + if not values: + self.pop(key, None) + else: + existing_items = [(k, v) for (k, v) in self._list if k != key] + self._list = existing_items + [(key, value) for value in values] + self._dict[key] = values[-1] + + def append(self, key: typing.Any, value: typing.Any) -> None: + self._list.append((key, value)) + self._dict[key] = value + + def update( + self, + *args: typing.Union[ + "MultiDict", + typing.Mapping, + typing.List[typing.Tuple[typing.Any, typing.Any]], + ], + **kwargs: typing.Any, + ) -> None: + value = MultiDict(*args, **kwargs) + existing_items = [(k, v) for (k, v) in self._list if k not in value.keys()] + self._list = existing_items + 
value.multi_items() + self._dict.update(value) + + +class QueryParams(ImmutableMultiDict): + """ + An immutable multidict. + """ + + def __init__( + self, + *args: typing.Union[ + "ImmutableMultiDict", + typing.Mapping, + typing.List[typing.Tuple[typing.Any, typing.Any]], + str, + bytes, + ], + **kwargs: typing.Any, + ) -> None: + assert len(args) < 2, "Too many arguments." + + value = args[0] if args else [] + + if isinstance(value, str): + super().__init__(parse_qsl(value, keep_blank_values=True), **kwargs) + elif isinstance(value, bytes): + super().__init__( + parse_qsl(value.decode("latin-1"), keep_blank_values=True), **kwargs + ) + else: + super().__init__(*args, **kwargs) # type: ignore + self._list = [(str(k), str(v)) for k, v in self._list] + self._dict = {str(k): str(v) for k, v in self._dict.items()} + + def __str__(self) -> str: + return urlencode(self._list) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + query_string = str(self) + return f"{class_name}({query_string!r})" + + +class UploadFile: + """ + An uploaded file included as part of the request data. + """ + + spool_max_size = 1024 * 1024 + file: typing.BinaryIO + headers: "Headers" + + def __init__( + self, + filename: str, + file: typing.Optional[typing.BinaryIO] = None, + content_type: str = "", + *, + headers: "typing.Optional[Headers]" = None, + ) -> None: + self.filename = filename + self.content_type = content_type + if file is None: + self.file = tempfile.SpooledTemporaryFile(max_size=self.spool_max_size) # type: ignore # noqa: E501 + else: + self.file = file + self.headers = headers or Headers() + + @property + def _in_memory(self) -> bool: + rolled_to_disk = getattr(self.file, "_rolled", True) + return not rolled_to_disk + + async def write(self, data: bytes) -> None: + if self._in_memory: + self.file.write(data) + else: + await run_in_threadpool(self.file.write, data) + + async def read(self, size: int = -1) -> bytes: + if self._in_memory: + return self.file.read(size) + return await run_in_threadpool(self.file.read, size) + + async def seek(self, offset: int) -> None: + if self._in_memory: + self.file.seek(offset) + else: + await run_in_threadpool(self.file.seek, offset) + + async def close(self) -> None: + if self._in_memory: + self.file.close() + else: + await run_in_threadpool(self.file.close) + + +class FormData(ImmutableMultiDict): + """ + An immutable multidict, containing both file uploads and text input. + """ + + def __init__( + self, + *args: typing.Union[ + "FormData", + typing.Mapping[str, typing.Union[str, UploadFile]], + typing.List[typing.Tuple[str, typing.Union[str, UploadFile]]], + ], + **kwargs: typing.Union[str, UploadFile], + ) -> None: + super().__init__(*args, **kwargs) + + async def close(self) -> None: + for key, value in self.multi_items(): + if isinstance(value, UploadFile): + await value.close() + + +class Headers(typing.Mapping[str, str]): + """ + An immutable, case-insensitive multidict. + """ + + def __init__( + self, + headers: typing.Optional[typing.Mapping[str, str]] = None, + raw: typing.Optional[typing.List[typing.Tuple[bytes, bytes]]] = None, + scope: typing.Optional[typing.Mapping[str, typing.Any]] = None, + ) -> None: + self._list: typing.List[typing.Tuple[bytes, bytes]] = [] + if headers is not None: + assert raw is None, 'Cannot set both "headers" and "raw".' + assert scope is None, 'Cannot set both "headers" and "scope".' 
+ self._list = [ + (key.lower().encode("latin-1"), value.encode("latin-1")) + for key, value in headers.items() + ] + elif raw is not None: + assert scope is None, 'Cannot set both "raw" and "scope".' + self._list = raw + elif scope is not None: + self._list = scope["headers"] + + @property + def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]: + return list(self._list) + + def keys(self) -> typing.List[str]: # type: ignore + return [key.decode("latin-1") for key, value in self._list] + + def values(self) -> typing.List[str]: # type: ignore + return [value.decode("latin-1") for key, value in self._list] + + def items(self) -> typing.List[typing.Tuple[str, str]]: # type: ignore + return [ + (key.decode("latin-1"), value.decode("latin-1")) + for key, value in self._list + ] + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + try: + return self[key] + except KeyError: + return default + + def getlist(self, key: str) -> typing.List[str]: + get_header_key = key.lower().encode("latin-1") + return [ + item_value.decode("latin-1") + for item_key, item_value in self._list + if item_key == get_header_key + ] + + def mutablecopy(self) -> "MutableHeaders": + return MutableHeaders(raw=self._list[:]) + + def __getitem__(self, key: str) -> str: + get_header_key = key.lower().encode("latin-1") + for header_key, header_value in self._list: + if header_key == get_header_key: + return header_value.decode("latin-1") + raise KeyError(key) + + def __contains__(self, key: typing.Any) -> bool: + get_header_key = key.lower().encode("latin-1") + for header_key, header_value in self._list: + if header_key == get_header_key: + return True + return False + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._list) + + def __eq__(self, other: typing.Any) -> bool: + if not isinstance(other, Headers): + return False + return sorted(self._list) == sorted(other._list) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + as_dict = dict(self.items()) + if len(as_dict) == len(self): + return f"{class_name}({as_dict!r})" + return f"{class_name}(raw={self.raw!r})" + + +class MutableHeaders(Headers): + def __setitem__(self, key: str, value: str) -> None: + """ + Set the header `key` to `value`, removing any duplicate entries. + Retains insertion order. + """ + set_key = key.lower().encode("latin-1") + set_value = value.encode("latin-1") + + found_indexes = [] + for idx, (item_key, item_value) in enumerate(self._list): + if item_key == set_key: + found_indexes.append(idx) + + for idx in reversed(found_indexes[1:]): + del self._list[idx] + + if found_indexes: + idx = found_indexes[0] + self._list[idx] = (set_key, set_value) + else: + self._list.append((set_key, set_value)) + + def __delitem__(self, key: str) -> None: + """ + Remove the header `key`. 
+ """ + del_key = key.lower().encode("latin-1") + + pop_indexes = [] + for idx, (item_key, item_value) in enumerate(self._list): + if item_key == del_key: + pop_indexes.append(idx) + + for idx in reversed(pop_indexes): + del self._list[idx] + + def __ior__(self, other: typing.Mapping) -> "MutableHeaders": + if not isinstance(other, typing.Mapping): + raise TypeError(f"Expected a mapping but got {other.__class__.__name__}") + self.update(other) + return self + + def __or__(self, other: typing.Mapping) -> "MutableHeaders": + if not isinstance(other, typing.Mapping): + raise TypeError(f"Expected a mapping but got {other.__class__.__name__}") + new = self.mutablecopy() + new.update(other) + return new + + @property + def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]: + return self._list + + def setdefault(self, key: str, value: str) -> str: + """ + If the header `key` does not exist, then set it to `value`. + Returns the header value. + """ + set_key = key.lower().encode("latin-1") + set_value = value.encode("latin-1") + + for idx, (item_key, item_value) in enumerate(self._list): + if item_key == set_key: + return item_value.decode("latin-1") + self._list.append((set_key, set_value)) + return value + + def update(self, other: typing.Mapping) -> None: + for key, val in other.items(): + self[key] = val + + def append(self, key: str, value: str) -> None: + """ + Append a header, preserving any duplicate entries. + """ + append_key = key.lower().encode("latin-1") + append_value = value.encode("latin-1") + self._list.append((append_key, append_value)) + + def add_vary_header(self, vary: str) -> None: + existing = self.get("vary") + if existing is not None: + vary = ", ".join([existing, vary]) + self["vary"] = vary + + +class State: + """ + An object that can be used to store arbitrary state. + + Used for `request.state` and `app.state`. 
+ """ + + _state: typing.Dict[str, typing.Any] + + def __init__(self, state: typing.Optional[typing.Dict[str, typing.Any]] = None): + if state is None: + state = {} + super().__setattr__("_state", state) + + def __setattr__(self, key: typing.Any, value: typing.Any) -> None: + self._state[key] = value + + def __getattr__(self, key: typing.Any) -> typing.Any: + try: + return self._state[key] + except KeyError: + message = "'{}' object has no attribute '{}'" + raise AttributeError(message.format(self.__class__.__name__, key)) + + def __delattr__(self, key: typing.Any) -> None: + del self._state[key] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/endpoints.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/endpoints.py new file mode 100644 index 00000000..f2468a32 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/endpoints.py @@ -0,0 +1,132 @@ +import asyncio +import json +import typing + +from starlette import status +from starlette.concurrency import run_in_threadpool +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.responses import PlainTextResponse, Response +from starlette.types import Message, Receive, Scope, Send +from starlette.websockets import WebSocket + + +class HTTPEndpoint: + def __init__(self, scope: Scope, receive: Receive, send: Send) -> None: + assert scope["type"] == "http" + self.scope = scope + self.receive = receive + self.send = send + self._allowed_methods = [ + method + for method in ("GET", "HEAD", "POST", "PUT", "PATCH", "DELETE", "OPTIONS") + if getattr(self, method.lower(), None) is not None + ] + + def __await__(self) -> typing.Generator: + return self.dispatch().__await__() + + async def dispatch(self) -> None: + request = Request(self.scope, receive=self.receive) + handler_name = ( + "get" + if request.method == "HEAD" and not hasattr(self, "head") + else request.method.lower() + ) + + handler: typing.Callable[[Request], typing.Any] = getattr( + self, handler_name, self.method_not_allowed + ) + is_async = asyncio.iscoroutinefunction(handler) + if is_async: + response = await handler(request) + else: + response = await run_in_threadpool(handler, request) + await response(self.scope, self.receive, self.send) + + async def method_not_allowed(self, request: Request) -> Response: + # If we're running inside a starlette application then raise an + # exception, so that the configurable exception handler can deal with + # returning the response. For plain ASGI apps, just return the response. + headers = {"Allow": ", ".join(self._allowed_methods)} + if "app" in self.scope: + raise HTTPException(status_code=405, headers=headers) + return PlainTextResponse("Method Not Allowed", status_code=405, headers=headers) + + +class WebSocketEndpoint: + + encoding: typing.Optional[str] = None # May be "text", "bytes", or "json". 
+ + def __init__(self, scope: Scope, receive: Receive, send: Send) -> None: + assert scope["type"] == "websocket" + self.scope = scope + self.receive = receive + self.send = send + + def __await__(self) -> typing.Generator: + return self.dispatch().__await__() + + async def dispatch(self) -> None: + websocket = WebSocket(self.scope, receive=self.receive, send=self.send) + await self.on_connect(websocket) + + close_code = status.WS_1000_NORMAL_CLOSURE + + try: + while True: + message = await websocket.receive() + if message["type"] == "websocket.receive": + data = await self.decode(websocket, message) + await self.on_receive(websocket, data) + elif message["type"] == "websocket.disconnect": + close_code = int( + message.get("code") or status.WS_1000_NORMAL_CLOSURE + ) + break + except Exception as exc: + close_code = status.WS_1011_INTERNAL_ERROR + raise exc + finally: + await self.on_disconnect(websocket, close_code) + + async def decode(self, websocket: WebSocket, message: Message) -> typing.Any: + + if self.encoding == "text": + if "text" not in message: + await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA) + raise RuntimeError("Expected text websocket messages, but got bytes") + return message["text"] + + elif self.encoding == "bytes": + if "bytes" not in message: + await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA) + raise RuntimeError("Expected bytes websocket messages, but got text") + return message["bytes"] + + elif self.encoding == "json": + if message.get("text") is not None: + text = message["text"] + else: + text = message["bytes"].decode("utf-8") + + try: + return json.loads(text) + except json.decoder.JSONDecodeError: + await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA) + raise RuntimeError("Malformed JSON data received.") + + assert ( + self.encoding is None + ), f"Unsupported 'encoding' attribute {self.encoding}" + return message["text"] if message.get("text") else message["bytes"] + + async def on_connect(self, websocket: WebSocket) -> None: + """Override to handle an incoming websocket connection""" + await websocket.accept() + + async def on_receive(self, websocket: WebSocket, data: typing.Any) -> None: + """Override to handle an incoming websocket message""" + + async def on_disconnect(self, websocket: WebSocket, close_code: int) -> None: + """Override to handle a disconnecting websocket""" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/exceptions.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/exceptions.py new file mode 100644 index 00000000..61039c59 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/exceptions.py @@ -0,0 +1,111 @@ +import asyncio +import http +import typing + +from starlette.concurrency import run_in_threadpool +from starlette.requests import Request +from starlette.responses import PlainTextResponse, Response +from starlette.types import ASGIApp, Message, Receive, Scope, Send + + +class HTTPException(Exception): + def __init__( + self, + status_code: int, + detail: typing.Optional[str] = None, + headers: typing.Optional[dict] = None, + ) -> None: + if detail is None: + detail = http.HTTPStatus(status_code).phrase + self.status_code = status_code + self.detail = detail + self.headers = headers + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + return f"{class_name}(status_code={self.status_code!r}, detail={self.detail!r})" + + +class ExceptionMiddleware: + def __init__( + self, + app: ASGIApp, + handlers: 
typing.Optional[ + typing.Mapping[typing.Any, typing.Callable[[Request, Exception], Response]] + ] = None, + debug: bool = False, + ) -> None: + self.app = app + self.debug = debug # TODO: We ought to handle 404 cases if debug is set. + self._status_handlers: typing.Dict[int, typing.Callable] = {} + self._exception_handlers: typing.Dict[ + typing.Type[Exception], typing.Callable + ] = {HTTPException: self.http_exception} + if handlers is not None: + for key, value in handlers.items(): + self.add_exception_handler(key, value) + + def add_exception_handler( + self, + exc_class_or_status_code: typing.Union[int, typing.Type[Exception]], + handler: typing.Callable[[Request, Exception], Response], + ) -> None: + if isinstance(exc_class_or_status_code, int): + self._status_handlers[exc_class_or_status_code] = handler + else: + assert issubclass(exc_class_or_status_code, Exception) + self._exception_handlers[exc_class_or_status_code] = handler + + def _lookup_exception_handler( + self, exc: Exception + ) -> typing.Optional[typing.Callable]: + for cls in type(exc).__mro__: + if cls in self._exception_handlers: + return self._exception_handlers[cls] + return None + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if scope["type"] != "http": + await self.app(scope, receive, send) + return + + response_started = False + + async def sender(message: Message) -> None: + nonlocal response_started + + if message["type"] == "http.response.start": + response_started = True + await send(message) + + try: + await self.app(scope, receive, sender) + except Exception as exc: + handler = None + + if isinstance(exc, HTTPException): + handler = self._status_handlers.get(exc.status_code) + + if handler is None: + handler = self._lookup_exception_handler(exc) + + if handler is None: + raise exc + + if response_started: + msg = "Caught handled exception, but response already started." 
+ raise RuntimeError(msg) from exc + + request = Request(scope, receive=receive) + if asyncio.iscoroutinefunction(handler): + response = await handler(request, exc) + else: + response = await run_in_threadpool(handler, request, exc) + await response(scope, receive, sender) + + def http_exception(self, request: Request, exc: HTTPException) -> Response: + if exc.status_code in {204, 304}: + return Response(status_code=exc.status_code, headers=exc.headers) + return PlainTextResponse( + exc.detail, status_code=exc.status_code, headers=exc.headers + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/formparsers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/formparsers.py new file mode 100644 index 00000000..fd194922 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/formparsers.py @@ -0,0 +1,238 @@ +import typing +from enum import Enum +from urllib.parse import unquote_plus + +from starlette.datastructures import FormData, Headers, UploadFile + +try: + import multipart + from multipart.multipart import parse_options_header +except ImportError: # pragma: nocover + parse_options_header = None + multipart = None + + +class FormMessage(Enum): + FIELD_START = 1 + FIELD_NAME = 2 + FIELD_DATA = 3 + FIELD_END = 4 + END = 5 + + +class MultiPartMessage(Enum): + PART_BEGIN = 1 + PART_DATA = 2 + PART_END = 3 + HEADER_FIELD = 4 + HEADER_VALUE = 5 + HEADER_END = 6 + HEADERS_FINISHED = 7 + END = 8 + + +def _user_safe_decode(src: bytes, codec: str) -> str: + try: + return src.decode(codec) + except (UnicodeDecodeError, LookupError): + return src.decode("latin-1") + + +class FormParser: + def __init__( + self, headers: Headers, stream: typing.AsyncGenerator[bytes, None] + ) -> None: + assert ( + multipart is not None + ), "The `python-multipart` library must be installed to use form parsing." + self.headers = headers + self.stream = stream + self.messages: typing.List[typing.Tuple[FormMessage, bytes]] = [] + + def on_field_start(self) -> None: + message = (FormMessage.FIELD_START, b"") + self.messages.append(message) + + def on_field_name(self, data: bytes, start: int, end: int) -> None: + message = (FormMessage.FIELD_NAME, data[start:end]) + self.messages.append(message) + + def on_field_data(self, data: bytes, start: int, end: int) -> None: + message = (FormMessage.FIELD_DATA, data[start:end]) + self.messages.append(message) + + def on_field_end(self) -> None: + message = (FormMessage.FIELD_END, b"") + self.messages.append(message) + + def on_end(self) -> None: + message = (FormMessage.END, b"") + self.messages.append(message) + + async def parse(self) -> FormData: + # Callbacks dictionary. + callbacks = { + "on_field_start": self.on_field_start, + "on_field_name": self.on_field_name, + "on_field_data": self.on_field_data, + "on_field_end": self.on_field_end, + "on_end": self.on_end, + } + + # Create the parser. + parser = multipart.QuerystringParser(callbacks) + field_name = b"" + field_value = b"" + + items: typing.List[typing.Tuple[str, typing.Union[str, UploadFile]]] = [] + + # Feed the parser with data from the request. 
+ async for chunk in self.stream: + if chunk: + parser.write(chunk) + else: + parser.finalize() + messages = list(self.messages) + self.messages.clear() + for message_type, message_bytes in messages: + if message_type == FormMessage.FIELD_START: + field_name = b"" + field_value = b"" + elif message_type == FormMessage.FIELD_NAME: + field_name += message_bytes + elif message_type == FormMessage.FIELD_DATA: + field_value += message_bytes + elif message_type == FormMessage.FIELD_END: + name = unquote_plus(field_name.decode("latin-1")) + value = unquote_plus(field_value.decode("latin-1")) + items.append((name, value)) + + return FormData(items) + + +class MultiPartParser: + def __init__( + self, headers: Headers, stream: typing.AsyncGenerator[bytes, None] + ) -> None: + assert ( + multipart is not None + ), "The `python-multipart` library must be installed to use form parsing." + self.headers = headers + self.stream = stream + self.messages: typing.List[typing.Tuple[MultiPartMessage, bytes]] = [] + + def on_part_begin(self) -> None: + message = (MultiPartMessage.PART_BEGIN, b"") + self.messages.append(message) + + def on_part_data(self, data: bytes, start: int, end: int) -> None: + message = (MultiPartMessage.PART_DATA, data[start:end]) + self.messages.append(message) + + def on_part_end(self) -> None: + message = (MultiPartMessage.PART_END, b"") + self.messages.append(message) + + def on_header_field(self, data: bytes, start: int, end: int) -> None: + message = (MultiPartMessage.HEADER_FIELD, data[start:end]) + self.messages.append(message) + + def on_header_value(self, data: bytes, start: int, end: int) -> None: + message = (MultiPartMessage.HEADER_VALUE, data[start:end]) + self.messages.append(message) + + def on_header_end(self) -> None: + message = (MultiPartMessage.HEADER_END, b"") + self.messages.append(message) + + def on_headers_finished(self) -> None: + message = (MultiPartMessage.HEADERS_FINISHED, b"") + self.messages.append(message) + + def on_end(self) -> None: + message = (MultiPartMessage.END, b"") + self.messages.append(message) + + async def parse(self) -> FormData: + # Parse the Content-Type header to get the multipart boundary. + content_type, params = parse_options_header(self.headers["Content-Type"]) + charset = params.get(b"charset", "utf-8") + if type(charset) == bytes: + charset = charset.decode("latin-1") + boundary = params[b"boundary"] + + # Callbacks dictionary. + callbacks = { + "on_part_begin": self.on_part_begin, + "on_part_data": self.on_part_data, + "on_part_end": self.on_part_end, + "on_header_field": self.on_header_field, + "on_header_value": self.on_header_value, + "on_header_end": self.on_header_end, + "on_headers_finished": self.on_headers_finished, + "on_end": self.on_end, + } + + # Create the parser. + parser = multipart.MultipartParser(boundary, callbacks) + header_field = b"" + header_value = b"" + content_disposition = None + content_type = b"" + field_name = "" + data = b"" + file: typing.Optional[UploadFile] = None + + items: typing.List[typing.Tuple[str, typing.Union[str, UploadFile]]] = [] + item_headers: typing.List[typing.Tuple[bytes, bytes]] = [] + + # Feed the parser with data from the request. 
+ async for chunk in self.stream: + parser.write(chunk) + messages = list(self.messages) + self.messages.clear() + for message_type, message_bytes in messages: + if message_type == MultiPartMessage.PART_BEGIN: + content_disposition = None + content_type = b"" + data = b"" + item_headers = [] + elif message_type == MultiPartMessage.HEADER_FIELD: + header_field += message_bytes + elif message_type == MultiPartMessage.HEADER_VALUE: + header_value += message_bytes + elif message_type == MultiPartMessage.HEADER_END: + field = header_field.lower() + if field == b"content-disposition": + content_disposition = header_value + elif field == b"content-type": + content_type = header_value + item_headers.append((field, header_value)) + header_field = b"" + header_value = b"" + elif message_type == MultiPartMessage.HEADERS_FINISHED: + disposition, options = parse_options_header(content_disposition) + field_name = _user_safe_decode(options[b"name"], charset) + if b"filename" in options: + filename = _user_safe_decode(options[b"filename"], charset) + file = UploadFile( + filename=filename, + content_type=content_type.decode("latin-1"), + headers=Headers(raw=item_headers), + ) + else: + file = None + elif message_type == MultiPartMessage.PART_DATA: + if file is None: + data += message_bytes + else: + await file.write(message_bytes) + elif message_type == MultiPartMessage.PART_END: + if file is None: + items.append((field_name, _user_safe_decode(data, charset))) + else: + await file.seek(0) + items.append((field_name, file)) + + parser.finalize() + return FormData(items) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/__init__.py new file mode 100644 index 00000000..5ac5b96c --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/__init__.py @@ -0,0 +1,17 @@ +import typing + + +class Middleware: + def __init__(self, cls: type, **options: typing.Any) -> None: + self.cls = cls + self.options = options + + def __iter__(self) -> typing.Iterator: + as_tuple = (self.cls, self.options) + return iter(as_tuple) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + option_strings = [f"{key}={value!r}" for key, value in self.options.items()] + args_repr = ", ".join([self.cls.__name__] + option_strings) + return f"{class_name}({args_repr})" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/authentication.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/authentication.py new file mode 100644 index 00000000..76e4a246 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/authentication.py @@ -0,0 +1,52 @@ +import typing + +from starlette.authentication import ( + AuthCredentials, + AuthenticationBackend, + AuthenticationError, + UnauthenticatedUser, +) +from starlette.requests import HTTPConnection +from starlette.responses import PlainTextResponse, Response +from starlette.types import ASGIApp, Receive, Scope, Send + + +class AuthenticationMiddleware: + def __init__( + self, + app: ASGIApp, + backend: AuthenticationBackend, + on_error: typing.Optional[ + typing.Callable[[HTTPConnection, AuthenticationError], Response] + ] = None, + ) -> None: + self.app = app + self.backend = backend + self.on_error: typing.Callable[ + [HTTPConnection, AuthenticationError], Response + ] = (on_error if on_error is not None 
else self.default_on_error) + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if scope["type"] not in ["http", "websocket"]: + await self.app(scope, receive, send) + return + + conn = HTTPConnection(scope) + try: + auth_result = await self.backend.authenticate(conn) + except AuthenticationError as exc: + response = self.on_error(conn, exc) + if scope["type"] == "websocket": + await send({"type": "websocket.close", "code": 1000}) + else: + await response(scope, receive, send) + return + + if auth_result is None: + auth_result = AuthCredentials(), UnauthenticatedUser() + scope["auth"], scope["user"] = auth_result + await self.app(scope, receive, send) + + @staticmethod + def default_on_error(conn: HTTPConnection, exc: Exception) -> Response: + return PlainTextResponse(str(exc), status_code=400) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/base.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/base.py new file mode 100644 index 00000000..ca9deb7d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/base.py @@ -0,0 +1,75 @@ +import typing + +import anyio + +from starlette.requests import Request +from starlette.responses import Response, StreamingResponse +from starlette.types import ASGIApp, Receive, Scope, Send + +RequestResponseEndpoint = typing.Callable[[Request], typing.Awaitable[Response]] +DispatchFunction = typing.Callable[ + [Request, RequestResponseEndpoint], typing.Awaitable[Response] +] + + +class BaseHTTPMiddleware: + def __init__( + self, app: ASGIApp, dispatch: typing.Optional[DispatchFunction] = None + ) -> None: + self.app = app + self.dispatch_func = self.dispatch if dispatch is None else dispatch + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if scope["type"] != "http": + await self.app(scope, receive, send) + return + + async def call_next(request: Request) -> Response: + app_exc: typing.Optional[Exception] = None + send_stream, recv_stream = anyio.create_memory_object_stream() + + async def coro() -> None: + nonlocal app_exc + + async with send_stream: + try: + await self.app(scope, request.receive, send_stream.send) + except Exception as exc: + app_exc = exc + + task_group.start_soon(coro) + + try: + message = await recv_stream.receive() + except anyio.EndOfStream: + if app_exc is not None: + raise app_exc + raise RuntimeError("No response returned.") + + assert message["type"] == "http.response.start" + + async def body_stream() -> typing.AsyncGenerator[bytes, None]: + async with recv_stream: + async for message in recv_stream: + assert message["type"] == "http.response.body" + yield message.get("body", b"") + + if app_exc is not None: + raise app_exc + + response = StreamingResponse( + status_code=message["status"], content=body_stream() + ) + response.raw_headers = message["headers"] + return response + + async with anyio.create_task_group() as task_group: + request = Request(scope, receive=receive) + response = await self.dispatch_func(request, call_next) + await response(scope, receive, send) + task_group.cancel_scope.cancel() + + async def dispatch( + self, request: Request, call_next: RequestResponseEndpoint + ) -> Response: + raise NotImplementedError() # pragma: no cover diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/cors.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/cors.py new file 
mode 100644 index 00000000..b36d155f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/cors.py @@ -0,0 +1,177 @@ +import functools +import re +import typing + +from starlette.datastructures import Headers, MutableHeaders +from starlette.responses import PlainTextResponse, Response +from starlette.types import ASGIApp, Message, Receive, Scope, Send + +ALL_METHODS = ("DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT") +SAFELISTED_HEADERS = {"Accept", "Accept-Language", "Content-Language", "Content-Type"} + + +class CORSMiddleware: + def __init__( + self, + app: ASGIApp, + allow_origins: typing.Sequence[str] = (), + allow_methods: typing.Sequence[str] = ("GET",), + allow_headers: typing.Sequence[str] = (), + allow_credentials: bool = False, + allow_origin_regex: typing.Optional[str] = None, + expose_headers: typing.Sequence[str] = (), + max_age: int = 600, + ) -> None: + + if "*" in allow_methods: + allow_methods = ALL_METHODS + + compiled_allow_origin_regex = None + if allow_origin_regex is not None: + compiled_allow_origin_regex = re.compile(allow_origin_regex) + + allow_all_origins = "*" in allow_origins + allow_all_headers = "*" in allow_headers + preflight_explicit_allow_origin = not allow_all_origins or allow_credentials + + simple_headers = {} + if allow_all_origins: + simple_headers["Access-Control-Allow-Origin"] = "*" + if allow_credentials: + simple_headers["Access-Control-Allow-Credentials"] = "true" + if expose_headers: + simple_headers["Access-Control-Expose-Headers"] = ", ".join(expose_headers) + + preflight_headers = {} + if preflight_explicit_allow_origin: + # The origin value will be set in preflight_response() if it is allowed. + preflight_headers["Vary"] = "Origin" + else: + preflight_headers["Access-Control-Allow-Origin"] = "*" + preflight_headers.update( + { + "Access-Control-Allow-Methods": ", ".join(allow_methods), + "Access-Control-Max-Age": str(max_age), + } + ) + allow_headers = sorted(SAFELISTED_HEADERS | set(allow_headers)) + if allow_headers and not allow_all_headers: + preflight_headers["Access-Control-Allow-Headers"] = ", ".join(allow_headers) + if allow_credentials: + preflight_headers["Access-Control-Allow-Credentials"] = "true" + + self.app = app + self.allow_origins = allow_origins + self.allow_methods = allow_methods + self.allow_headers = [h.lower() for h in allow_headers] + self.allow_all_origins = allow_all_origins + self.allow_all_headers = allow_all_headers + self.preflight_explicit_allow_origin = preflight_explicit_allow_origin + self.allow_origin_regex = compiled_allow_origin_regex + self.simple_headers = simple_headers + self.preflight_headers = preflight_headers + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if scope["type"] != "http": # pragma: no cover + await self.app(scope, receive, send) + return + + method = scope["method"] + headers = Headers(scope=scope) + origin = headers.get("origin") + + if origin is None: + await self.app(scope, receive, send) + return + + if method == "OPTIONS" and "access-control-request-method" in headers: + response = self.preflight_response(request_headers=headers) + await response(scope, receive, send) + return + + await self.simple_response(scope, receive, send, request_headers=headers) + + def is_allowed_origin(self, origin: str) -> bool: + if self.allow_all_origins: + return True + + if self.allow_origin_regex is not None and self.allow_origin_regex.fullmatch( + origin + ): + return True + + return origin in 
self.allow_origins + + def preflight_response(self, request_headers: Headers) -> Response: + requested_origin = request_headers["origin"] + requested_method = request_headers["access-control-request-method"] + requested_headers = request_headers.get("access-control-request-headers") + + headers = dict(self.preflight_headers) + failures = [] + + if self.is_allowed_origin(origin=requested_origin): + if self.preflight_explicit_allow_origin: + # The "else" case is already accounted for in self.preflight_headers + # and the value would be "*". + headers["Access-Control-Allow-Origin"] = requested_origin + else: + failures.append("origin") + + if requested_method not in self.allow_methods: + failures.append("method") + + # If we allow all headers, then we have to mirror back any requested + # headers in the response. + if self.allow_all_headers and requested_headers is not None: + headers["Access-Control-Allow-Headers"] = requested_headers + elif requested_headers is not None: + for header in [h.lower() for h in requested_headers.split(",")]: + if header.strip() not in self.allow_headers: + failures.append("headers") + break + + # We don't strictly need to use 400 responses here, since its up to + # the browser to enforce the CORS policy, but its more informative + # if we do. + if failures: + failure_text = "Disallowed CORS " + ", ".join(failures) + return PlainTextResponse(failure_text, status_code=400, headers=headers) + + return PlainTextResponse("OK", status_code=200, headers=headers) + + async def simple_response( + self, scope: Scope, receive: Receive, send: Send, request_headers: Headers + ) -> None: + send = functools.partial(self.send, send=send, request_headers=request_headers) + await self.app(scope, receive, send) + + async def send( + self, message: Message, send: Send, request_headers: Headers + ) -> None: + if message["type"] != "http.response.start": + await send(message) + return + + message.setdefault("headers", []) + headers = MutableHeaders(scope=message) + headers.update(self.simple_headers) + origin = request_headers["Origin"] + has_cookie = "cookie" in request_headers + + # If request includes any cookie headers, then we must respond + # with the specific origin instead of '*'. + if self.allow_all_origins and has_cookie: + self.allow_explicit_origin(headers, origin) + + # If we only allow specific origins, then we have to mirror back + # the Origin header in the response. 
+        elif not self.allow_all_origins and self.is_allowed_origin(origin=origin):
+            self.allow_explicit_origin(headers, origin)
+
+        await send(message)
+
+    @staticmethod
+    def allow_explicit_origin(headers: MutableHeaders, origin: str) -> None:
+        headers["Access-Control-Allow-Origin"] = origin
+        headers.add_vary_header("Origin")
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/errors.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/errors.py
new file mode 100644
index 00000000..acb1930f
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/errors.py
@@ -0,0 +1,254 @@
+import asyncio
+import html
+import inspect
+import traceback
+import typing
+
+from starlette.concurrency import run_in_threadpool
+from starlette.requests import Request
+from starlette.responses import HTMLResponse, PlainTextResponse, Response
+from starlette.types import ASGIApp, Message, Receive, Scope, Send
+
+STYLES = """
+p {
+    color: #211c1c;
+}
+.traceback-container {
+    border: 1px solid #038BB8;
+}
+.traceback-title {
+    background-color: #038BB8;
+    color: lemonchiffon;
+    padding: 12px;
+    font-size: 20px;
+    margin-top: 0px;
+}
+.frame-line {
+    padding-left: 10px;
+    font-family: monospace;
+}
+.frame-filename {
+    font-family: monospace;
+}
+.center-line {
+    background-color: #038BB8;
+    color: #f9f6e1;
+    padding: 5px 0px 5px 5px;
+}
+.lineno {
+    margin-right: 5px;
+}
+.frame-title {
+    font-weight: unset;
+    padding: 10px 10px 10px 10px;
+    background-color: #E4F4FD;
+    margin-right: 10px;
+    color: #191f21;
+    font-size: 17px;
+    border: 1px solid #c7dce8;
+}
+.collapse-btn {
+    float: right;
+    padding: 0px 5px 1px 5px;
+    border: solid 1px #96aebb;
+    cursor: pointer;
+}
+.collapsed {
+    display: none;
+}
+.source-code {
+    font-family: courier;
+    font-size: small;
+    padding-bottom: 10px;
+}
+"""
+
+JS = """
+<script type="text/javascript">
+    function collapse(element){
+        const frameId = element.getAttribute("data-frame-id");
+        const frame = document.getElementById(frameId);
+
+        if (frame.classList.contains("collapsed")){
+            element.innerHTML = "&#8210;";
+            frame.classList.remove("collapsed");
+        } else {
+            element.innerHTML = "+";
+            frame.classList.add("collapsed");
+        }
+    }
+</script>
+"""
+
+TEMPLATE = """
+<html>
+    <head>
+        <style type='text/css'>
+            {styles}
+        </style>
+        <title>Starlette Debugger</title>
+    </head>
+    <body>
+        <h1>500 Server Error</h1>
+        <h2>{error}</h2>
+        <div class="traceback-container">
+            <p class="traceback-title">Traceback</p>
+            <div>{exc_html}</div>
+        </div>
+        {js}
+    </body>
+</html>
+"""
+
+FRAME_TEMPLATE = """
+<div>
+    <p class="frame-title">File <span class="frame-filename">{frame_filename}</span>,
+    line <i>{frame_lineno}</i>,
+    in <b>{frame_name}</b>
+    <span class="collapse-btn" data-frame-id="{frame_filename}-{frame_lineno}" onclick="collapse(this)">{collapse_button}</span>
+    </p>
+    <div id="{frame_filename}-{frame_lineno}" class="source-code {collapsed}">{code_context}</div>
+</div>
+"""  # noqa: E501
+
+LINE = """
+<p><span class="frame-line">
+<span class="lineno">{lineno}.</span> {line}</span></p>
+"""
+
+CENTER_LINE = """
+<p class="center-line"><span class="frame-line center-line">
+<span class="lineno">{lineno}.</span> {line}</span></p>
+""" + + +class ServerErrorMiddleware: + """ + Handles returning 500 responses when a server error occurs. + + If 'debug' is set, then traceback responses will be returned, + otherwise the designated 'handler' will be called. + + This middleware class should generally be used to wrap *everything* + else up, so that unhandled exceptions anywhere in the stack + always result in an appropriate 500 response. + """ + + def __init__( + self, + app: ASGIApp, + handler: typing.Optional[typing.Callable] = None, + debug: bool = False, + ) -> None: + self.app = app + self.handler = handler + self.debug = debug + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if scope["type"] != "http": + await self.app(scope, receive, send) + return + + response_started = False + + async def _send(message: Message) -> None: + nonlocal response_started, send + + if message["type"] == "http.response.start": + response_started = True + await send(message) + + try: + await self.app(scope, receive, _send) + except Exception as exc: + request = Request(scope) + if self.debug: + # In debug mode, return traceback responses. + response = self.debug_response(request, exc) + elif self.handler is None: + # Use our default 500 error handler. + response = self.error_response(request, exc) + else: + # Use an installed 500 error handler. + if asyncio.iscoroutinefunction(self.handler): + response = await self.handler(request, exc) + else: + response = await run_in_threadpool(self.handler, request, exc) + + if not response_started: + await response(scope, receive, send) + + # We always continue to raise the exception. + # This allows servers to log the error, or allows test clients + # to optionally raise the error within the test case. + raise exc + + def format_line( + self, index: int, line: str, frame_lineno: int, frame_index: int + ) -> str: + values = { + # HTML escape - line could contain < or > + "line": html.escape(line).replace(" ", " "), + "lineno": (frame_lineno - frame_index) + index, + } + + if index != frame_index: + return LINE.format(**values) + return CENTER_LINE.format(**values) + + def generate_frame_html(self, frame: inspect.FrameInfo, is_collapsed: bool) -> str: + code_context = "".join( + self.format_line(index, line, frame.lineno, frame.index) # type: ignore + for index, line in enumerate(frame.code_context or []) + ) + + values = { + # HTML escape - filename could contain < or >, especially if it's a virtual + # file e.g. 
<stdin>
in the REPL + "frame_filename": html.escape(frame.filename), + "frame_lineno": frame.lineno, + # HTML escape - if you try very hard it's possible to name a function with < + # or > + "frame_name": html.escape(frame.function), + "code_context": code_context, + "collapsed": "collapsed" if is_collapsed else "", + "collapse_button": "+" if is_collapsed else "‒", + } + return FRAME_TEMPLATE.format(**values) + + def generate_html(self, exc: Exception, limit: int = 7) -> str: + traceback_obj = traceback.TracebackException.from_exception( + exc, capture_locals=True + ) + + exc_html = "" + is_collapsed = False + exc_traceback = exc.__traceback__ + if exc_traceback is not None: + frames = inspect.getinnerframes(exc_traceback, limit) + for frame in reversed(frames): + exc_html += self.generate_frame_html(frame, is_collapsed) + is_collapsed = True + + # escape error class and text + error = ( + f"{html.escape(traceback_obj.exc_type.__name__)}: " + f"{html.escape(str(traceback_obj))}" + ) + + return TEMPLATE.format(styles=STYLES, js=JS, error=error, exc_html=exc_html) + + def generate_plain_text(self, exc: Exception) -> str: + return "".join(traceback.format_exception(type(exc), exc, exc.__traceback__)) + + def debug_response(self, request: Request, exc: Exception) -> Response: + accept = request.headers.get("accept", "") + + if "text/html" in accept: + content = self.generate_html(exc) + return HTMLResponse(content, status_code=500) + content = self.generate_plain_text(exc) + return PlainTextResponse(content, status_code=500) + + def error_response(self, request: Request, exc: Exception) -> Response: + return PlainTextResponse("Internal Server Error", status_code=500) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/gzip.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/gzip.py new file mode 100644 index 00000000..9d69ee7c --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/gzip.py @@ -0,0 +1,105 @@ +import gzip +import io +import typing + +from starlette.datastructures import Headers, MutableHeaders +from starlette.types import ASGIApp, Message, Receive, Scope, Send + + +class GZipMiddleware: + def __init__( + self, app: ASGIApp, minimum_size: int = 500, compresslevel: int = 9 + ) -> None: + self.app = app + self.minimum_size = minimum_size + self.compresslevel = compresslevel + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if scope["type"] == "http": + headers = Headers(scope=scope) + if "gzip" in headers.get("Accept-Encoding", ""): + responder = GZipResponder( + self.app, self.minimum_size, compresslevel=self.compresslevel + ) + await responder(scope, receive, send) + return + await self.app(scope, receive, send) + + +class GZipResponder: + def __init__(self, app: ASGIApp, minimum_size: int, compresslevel: int = 9) -> None: + self.app = app + self.minimum_size = minimum_size + self.send: Send = unattached_send + self.initial_message: Message = {} + self.started = False + self.gzip_buffer = io.BytesIO() + self.gzip_file = gzip.GzipFile( + mode="wb", fileobj=self.gzip_buffer, compresslevel=compresslevel + ) + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + self.send = send + await self.app(scope, receive, self.send_with_gzip) + + async def send_with_gzip(self, message: Message) -> None: + message_type = message["type"] + if message_type == "http.response.start": + # Don't send the initial message until we've 
determined how to + # modify the outgoing headers correctly. + self.initial_message = message + elif message_type == "http.response.body" and not self.started: + self.started = True + body = message.get("body", b"") + more_body = message.get("more_body", False) + if len(body) < self.minimum_size and not more_body: + # Don't apply GZip to small outgoing responses. + await self.send(self.initial_message) + await self.send(message) + elif not more_body: + # Standard GZip response. + self.gzip_file.write(body) + self.gzip_file.close() + body = self.gzip_buffer.getvalue() + + headers = MutableHeaders(raw=self.initial_message["headers"]) + headers["Content-Encoding"] = "gzip" + headers["Content-Length"] = str(len(body)) + headers.add_vary_header("Accept-Encoding") + message["body"] = body + + await self.send(self.initial_message) + await self.send(message) + else: + # Initial body in streaming GZip response. + headers = MutableHeaders(raw=self.initial_message["headers"]) + headers["Content-Encoding"] = "gzip" + headers.add_vary_header("Accept-Encoding") + del headers["Content-Length"] + + self.gzip_file.write(body) + message["body"] = self.gzip_buffer.getvalue() + self.gzip_buffer.seek(0) + self.gzip_buffer.truncate() + + await self.send(self.initial_message) + await self.send(message) + + elif message_type == "http.response.body": + # Remaining body in streaming GZip response. + body = message.get("body", b"") + more_body = message.get("more_body", False) + + self.gzip_file.write(body) + if not more_body: + self.gzip_file.close() + + message["body"] = self.gzip_buffer.getvalue() + self.gzip_buffer.seek(0) + self.gzip_buffer.truncate() + + await self.send(message) + + +async def unattached_send(message: Message) -> typing.NoReturn: + raise RuntimeError("send awaitable not set") # pragma: no cover diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/httpsredirect.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/httpsredirect.py new file mode 100644 index 00000000..a8359067 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/httpsredirect.py @@ -0,0 +1,19 @@ +from starlette.datastructures import URL +from starlette.responses import RedirectResponse +from starlette.types import ASGIApp, Receive, Scope, Send + + +class HTTPSRedirectMiddleware: + def __init__(self, app: ASGIApp) -> None: + self.app = app + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if scope["type"] in ("http", "websocket") and scope["scheme"] in ("http", "ws"): + url = URL(scope=scope) + redirect_scheme = {"http": "https", "ws": "wss"}[url.scheme] + netloc = url.hostname if url.port in (80, 443) else url.netloc + url = url.replace(scheme=redirect_scheme, netloc=netloc) + response = RedirectResponse(url, status_code=307) + await response(scope, receive, send) + else: + await self.app(scope, receive, send) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/sessions.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/sessions.py new file mode 100644 index 00000000..597de38a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/sessions.py @@ -0,0 +1,80 @@ +import json +import typing +from base64 import b64decode, b64encode + +import itsdangerous +from itsdangerous.exc import BadSignature + +from starlette.datastructures import MutableHeaders, Secret +from 
starlette.requests import HTTPConnection +from starlette.types import ASGIApp, Message, Receive, Scope, Send + + +class SessionMiddleware: + def __init__( + self, + app: ASGIApp, + secret_key: typing.Union[str, Secret], + session_cookie: str = "session", + max_age: typing.Optional[int] = 14 * 24 * 60 * 60, # 14 days, in seconds + path: str = "/", + same_site: str = "lax", + https_only: bool = False, + ) -> None: + self.app = app + self.signer = itsdangerous.TimestampSigner(str(secret_key)) + self.session_cookie = session_cookie + self.max_age = max_age + self.path = path + self.security_flags = "httponly; samesite=" + same_site + if https_only: # Secure flag can be used with HTTPS only + self.security_flags += "; secure" + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if scope["type"] not in ("http", "websocket"): # pragma: no cover + await self.app(scope, receive, send) + return + + connection = HTTPConnection(scope) + initial_session_was_empty = True + + if self.session_cookie in connection.cookies: + data = connection.cookies[self.session_cookie].encode("utf-8") + try: + data = self.signer.unsign(data, max_age=self.max_age) + scope["session"] = json.loads(b64decode(data)) + initial_session_was_empty = False + except BadSignature: + scope["session"] = {} + else: + scope["session"] = {} + + async def send_wrapper(message: Message) -> None: + if message["type"] == "http.response.start": + if scope["session"]: + # We have session data to persist. + data = b64encode(json.dumps(scope["session"]).encode("utf-8")) + data = self.signer.sign(data) + headers = MutableHeaders(scope=message) + header_value = "{session_cookie}={data}; path={path}; {max_age}{security_flags}".format( # noqa E501 + session_cookie=self.session_cookie, + data=data.decode("utf-8"), + path=self.path, + max_age=f"Max-Age={self.max_age}; " if self.max_age else "", + security_flags=self.security_flags, + ) + headers.append("Set-Cookie", header_value) + elif not initial_session_was_empty: + # The session has been cleared. + headers = MutableHeaders(scope=message) + header_value = "{session_cookie}={data}; path={path}; {expires}{security_flags}".format( # noqa E501 + session_cookie=self.session_cookie, + data="null", + path=self.path, + expires="expires=Thu, 01 Jan 1970 00:00:00 GMT; ", + security_flags=self.security_flags, + ) + headers.append("Set-Cookie", header_value) + await send(message) + + await self.app(scope, receive, send_wrapper) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/trustedhost.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/trustedhost.py new file mode 100644 index 00000000..e84e6876 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/trustedhost.py @@ -0,0 +1,60 @@ +import typing + +from starlette.datastructures import URL, Headers +from starlette.responses import PlainTextResponse, RedirectResponse, Response +from starlette.types import ASGIApp, Receive, Scope, Send + +ENFORCE_DOMAIN_WILDCARD = "Domain wildcard patterns must be like '*.example.com'." 
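# A minimal sketch (not from the vendored sources) of the signed-cookie round
# trip that SessionMiddleware above performs with itsdangerous: the session
# dict is JSON-encoded, base64-encoded, then signed with a TimestampSigner so
# that unsign() can enforce max_age. The secret key and payload are placeholders.
import json
from base64 import b64decode, b64encode

import itsdangerous

signer = itsdangerous.TimestampSigner("example-secret-key")  # placeholder key

# Outbound: the value the middleware writes into the "session" cookie.
cookie_value = signer.sign(b64encode(json.dumps({"user_id": 42}).encode("utf-8")))

# Inbound: verify the signature (and age), then decode the payload.
payload = signer.unsign(cookie_value, max_age=14 * 24 * 60 * 60)
assert json.loads(b64decode(payload)) == {"user_id": 42}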
+ + +class TrustedHostMiddleware: + def __init__( + self, + app: ASGIApp, + allowed_hosts: typing.Optional[typing.Sequence[str]] = None, + www_redirect: bool = True, + ) -> None: + if allowed_hosts is None: + allowed_hosts = ["*"] + + for pattern in allowed_hosts: + assert "*" not in pattern[1:], ENFORCE_DOMAIN_WILDCARD + if pattern.startswith("*") and pattern != "*": + assert pattern.startswith("*."), ENFORCE_DOMAIN_WILDCARD + self.app = app + self.allowed_hosts = list(allowed_hosts) + self.allow_any = "*" in allowed_hosts + self.www_redirect = www_redirect + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if self.allow_any or scope["type"] not in ( + "http", + "websocket", + ): # pragma: no cover + await self.app(scope, receive, send) + return + + headers = Headers(scope=scope) + host = headers.get("host", "").split(":")[0] + is_valid_host = False + found_www_redirect = False + for pattern in self.allowed_hosts: + if host == pattern or ( + pattern.startswith("*") and host.endswith(pattern[1:]) + ): + is_valid_host = True + break + elif "www." + host == pattern: + found_www_redirect = True + + if is_valid_host: + await self.app(scope, receive, send) + else: + response: Response + if found_www_redirect and self.www_redirect: + url = URL(scope=scope) + redirect_url = url.replace(netloc="www." + url.netloc) + response = RedirectResponse(url=str(redirect_url)) + else: + response = PlainTextResponse("Invalid host header", status_code=400) + await response(scope, receive, send) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/wsgi.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/wsgi.py new file mode 100644 index 00000000..9dbd0652 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/middleware/wsgi.py @@ -0,0 +1,140 @@ +import io +import math +import sys +import typing +import warnings + +import anyio + +from starlette.types import Receive, Scope, Send + +warnings.warn( + "starlette.middleware.wsgi is deprecated and will be removed in a future release. " + "Please refer to https://github.com/abersheeran/a2wsgi as a replacement.", + DeprecationWarning, +) + + +def build_environ(scope: Scope, body: bytes) -> dict: + """ + Builds a scope and request body into a WSGI environ object. 
+ """ + environ = { + "REQUEST_METHOD": scope["method"], + "SCRIPT_NAME": scope.get("root_path", "").encode("utf8").decode("latin1"), + "PATH_INFO": scope["path"].encode("utf8").decode("latin1"), + "QUERY_STRING": scope["query_string"].decode("ascii"), + "SERVER_PROTOCOL": f"HTTP/{scope['http_version']}", + "wsgi.version": (1, 0), + "wsgi.url_scheme": scope.get("scheme", "http"), + "wsgi.input": io.BytesIO(body), + "wsgi.errors": sys.stdout, + "wsgi.multithread": True, + "wsgi.multiprocess": True, + "wsgi.run_once": False, + } + + # Get server name and port - required in WSGI, not in ASGI + server = scope.get("server") or ("localhost", 80) + environ["SERVER_NAME"] = server[0] + environ["SERVER_PORT"] = server[1] + + # Get client IP address + if scope.get("client"): + environ["REMOTE_ADDR"] = scope["client"][0] + + # Go through headers and make them into environ entries + for name, value in scope.get("headers", []): + name = name.decode("latin1") + if name == "content-length": + corrected_name = "CONTENT_LENGTH" + elif name == "content-type": + corrected_name = "CONTENT_TYPE" + else: + corrected_name = f"HTTP_{name}".upper().replace("-", "_") + # HTTPbis say only ASCII chars are allowed in headers, but we latin1 just in + # case + value = value.decode("latin1") + if corrected_name in environ: + value = environ[corrected_name] + "," + value + environ[corrected_name] = value + return environ + + +class WSGIMiddleware: + def __init__(self, app: typing.Callable) -> None: + self.app = app + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + assert scope["type"] == "http" + responder = WSGIResponder(self.app, scope) + await responder(receive, send) + + +class WSGIResponder: + def __init__(self, app: typing.Callable, scope: Scope) -> None: + self.app = app + self.scope = scope + self.status = None + self.response_headers = None + self.stream_send, self.stream_receive = anyio.create_memory_object_stream( + math.inf + ) + self.response_started = False + self.exc_info: typing.Any = None + + async def __call__(self, receive: Receive, send: Send) -> None: + body = b"" + more_body = True + while more_body: + message = await receive() + body += message.get("body", b"") + more_body = message.get("more_body", False) + environ = build_environ(self.scope, body) + + async with anyio.create_task_group() as task_group: + task_group.start_soon(self.sender, send) + async with self.stream_send: + await anyio.to_thread.run_sync(self.wsgi, environ, self.start_response) + if self.exc_info is not None: + raise self.exc_info[0].with_traceback(self.exc_info[1], self.exc_info[2]) + + async def sender(self, send: Send) -> None: + async with self.stream_receive: + async for message in self.stream_receive: + await send(message) + + def start_response( + self, + status: str, + response_headers: typing.List[typing.Tuple[str, str]], + exc_info: typing.Any = None, + ) -> None: + self.exc_info = exc_info + if not self.response_started: + self.response_started = True + status_code_string, _ = status.split(" ", 1) + status_code = int(status_code_string) + headers = [ + (name.strip().encode("ascii").lower(), value.strip().encode("ascii")) + for name, value in response_headers + ] + anyio.from_thread.run( + self.stream_send.send, + { + "type": "http.response.start", + "status": status_code, + "headers": headers, + }, + ) + + def wsgi(self, environ: dict, start_response: typing.Callable) -> None: + for chunk in self.app(environ, start_response): + anyio.from_thread.run( + self.stream_send.send, + 
{"type": "http.response.body", "body": chunk, "more_body": True}, + ) + + anyio.from_thread.run( + self.stream_send.send, {"type": "http.response.body", "body": b""} + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/py.typed b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/requests.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/requests.py new file mode 100644 index 00000000..c738ebaa --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/requests.py @@ -0,0 +1,290 @@ +import json +import typing +from collections.abc import Mapping +from http import cookies as http_cookies + +import anyio + +from starlette.datastructures import URL, Address, FormData, Headers, QueryParams, State +from starlette.formparsers import FormParser, MultiPartParser +from starlette.types import Message, Receive, Scope, Send + +try: + from multipart.multipart import parse_options_header +except ImportError: # pragma: nocover + parse_options_header = None + + +if typing.TYPE_CHECKING: + from starlette.routing import Router + + +SERVER_PUSH_HEADERS_TO_COPY = { + "accept", + "accept-encoding", + "accept-language", + "cache-control", + "user-agent", +} + + +def cookie_parser(cookie_string: str) -> typing.Dict[str, str]: + """ + This function parses a ``Cookie`` HTTP header into a dict of key/value pairs. + + It attempts to mimic browser cookie parsing behavior: browsers and web servers + frequently disregard the spec (RFC 6265) when setting and reading cookies, + so we attempt to suit the common scenarios here. + + This function has been adapted from Django 3.1.0. + Note: we are explicitly _NOT_ using `SimpleCookie.load` because it is based + on an outdated spec and will fail on lots of input we want to support + """ + cookie_dict: typing.Dict[str, str] = {} + for chunk in cookie_string.split(";"): + if "=" in chunk: + key, val = chunk.split("=", 1) + else: + # Assume an empty name per + # https://bugzilla.mozilla.org/show_bug.cgi?id=169091 + key, val = "", chunk + key, val = key.strip(), val.strip() + if key or val: + # unquote using Python's algorithm. + cookie_dict[key] = http_cookies._unquote(val) + return cookie_dict + + +class ClientDisconnect(Exception): + pass + + +class HTTPConnection(Mapping): + """ + A base class for incoming HTTP connections, that is used to provide + any functionality that is common to both `Request` and `WebSocket`. + """ + + def __init__(self, scope: Scope, receive: typing.Optional[Receive] = None) -> None: + assert scope["type"] in ("http", "websocket") + self.scope = scope + + def __getitem__(self, key: str) -> typing.Any: + return self.scope[key] + + def __iter__(self) -> typing.Iterator[str]: + return iter(self.scope) + + def __len__(self) -> int: + return len(self.scope) + + # Don't use the `abc.Mapping.__eq__` implementation. + # Connection instances should never be considered equal + # unless `self is other`. 
+ __eq__ = object.__eq__ + __hash__ = object.__hash__ + + @property + def app(self) -> typing.Any: + return self.scope["app"] + + @property + def url(self) -> URL: + if not hasattr(self, "_url"): + self._url = URL(scope=self.scope) + return self._url + + @property + def base_url(self) -> URL: + if not hasattr(self, "_base_url"): + base_url_scope = dict(self.scope) + base_url_scope["path"] = "/" + base_url_scope["query_string"] = b"" + base_url_scope["root_path"] = base_url_scope.get( + "app_root_path", base_url_scope.get("root_path", "") + ) + self._base_url = URL(scope=base_url_scope) + return self._base_url + + @property + def headers(self) -> Headers: + if not hasattr(self, "_headers"): + self._headers = Headers(scope=self.scope) + return self._headers + + @property + def query_params(self) -> QueryParams: + if not hasattr(self, "_query_params"): + self._query_params = QueryParams(self.scope["query_string"]) + return self._query_params + + @property + def path_params(self) -> typing.Dict[str, typing.Any]: + return self.scope.get("path_params", {}) + + @property + def cookies(self) -> typing.Dict[str, str]: + if not hasattr(self, "_cookies"): + cookies: typing.Dict[str, str] = {} + cookie_header = self.headers.get("cookie") + + if cookie_header: + cookies = cookie_parser(cookie_header) + self._cookies = cookies + return self._cookies + + @property + def client(self) -> typing.Optional[Address]: + # client is a 2 item tuple of (host, port), None or missing + host_port = self.scope.get("client") + if host_port is not None: + return Address(*host_port) + return None + + @property + def session(self) -> dict: + assert ( + "session" in self.scope + ), "SessionMiddleware must be installed to access request.session" + return self.scope["session"] + + @property + def auth(self) -> typing.Any: + assert ( + "auth" in self.scope + ), "AuthenticationMiddleware must be installed to access request.auth" + return self.scope["auth"] + + @property + def user(self) -> typing.Any: + assert ( + "user" in self.scope + ), "AuthenticationMiddleware must be installed to access request.user" + return self.scope["user"] + + @property + def state(self) -> State: + if not hasattr(self, "_state"): + # Ensure 'state' has an empty dict if it's not already populated. 
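# (Illustrative note: because the dict lives on the scope itself, a value
# stashed by earlier middleware -- e.g. `request.state.user = ...` -- is
# visible to every later handler that reads the same connection's .state.)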
+ self.scope.setdefault("state", {}) + # Create a state instance with a reference to the dict in which it should + # store info + self._state = State(self.scope["state"]) + return self._state + + def url_for(self, name: str, **path_params: typing.Any) -> str: + router: Router = self.scope["router"] + url_path = router.url_path_for(name, **path_params) + return url_path.make_absolute_url(base_url=self.base_url) + + +async def empty_receive() -> typing.NoReturn: + raise RuntimeError("Receive channel has not been made available") + + +async def empty_send(message: Message) -> typing.NoReturn: + raise RuntimeError("Send channel has not been made available") + + +class Request(HTTPConnection): + def __init__( + self, scope: Scope, receive: Receive = empty_receive, send: Send = empty_send + ): + super().__init__(scope) + assert scope["type"] == "http" + self._receive = receive + self._send = send + self._stream_consumed = False + self._is_disconnected = False + + @property + def method(self) -> str: + return self.scope["method"] + + @property + def receive(self) -> Receive: + return self._receive + + async def stream(self) -> typing.AsyncGenerator[bytes, None]: + if hasattr(self, "_body"): + yield self._body + yield b"" + return + + if self._stream_consumed: + raise RuntimeError("Stream consumed") + + self._stream_consumed = True + while True: + message = await self._receive() + if message["type"] == "http.request": + body = message.get("body", b"") + if body: + yield body + if not message.get("more_body", False): + break + elif message["type"] == "http.disconnect": + self._is_disconnected = True + raise ClientDisconnect() + yield b"" + + async def body(self) -> bytes: + if not hasattr(self, "_body"): + chunks = [] + async for chunk in self.stream(): + chunks.append(chunk) + self._body = b"".join(chunks) + return self._body + + async def json(self) -> typing.Any: + if not hasattr(self, "_json"): + body = await self.body() + self._json = json.loads(body) + return self._json + + async def form(self) -> FormData: + if not hasattr(self, "_form"): + assert ( + parse_options_header is not None + ), "The `python-multipart` library must be installed to use form parsing." 
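# Illustrative of the dispatch below (header value assumed): python-multipart's
# parse_options_header splits a raw Content-Type such as
#     "multipart/form-data; boundary=abc123"
# into (b"multipart/form-data", {b"boundary": b"abc123"}), which is why the
# comparisons below are against bytes literals.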
+ content_type_header = self.headers.get("Content-Type")
+ content_type, options = parse_options_header(content_type_header)
+ if content_type == b"multipart/form-data":
+ multipart_parser = MultiPartParser(self.headers, self.stream())
+ self._form = await multipart_parser.parse()
+ elif content_type == b"application/x-www-form-urlencoded":
+ form_parser = FormParser(self.headers, self.stream())
+ self._form = await form_parser.parse()
+ else:
+ self._form = FormData()
+ return self._form
+
+ async def close(self) -> None:
+ if hasattr(self, "_form"):
+ await self._form.close()
+
+ async def is_disconnected(self) -> bool:
+ if not self._is_disconnected:
+ message: Message = {}
+
+ # If message isn't immediately available, move on
+ with anyio.CancelScope() as cs:
+ cs.cancel()
+ message = await self._receive()
+
+ if message.get("type") == "http.disconnect":
+ self._is_disconnected = True
+
+ return self._is_disconnected
+
+ async def send_push_promise(self, path: str) -> None:
+ if "http.response.push" in self.scope.get("extensions", {}):
+ raw_headers = []
+ for name in SERVER_PUSH_HEADERS_TO_COPY:
+ for value in self.headers.getlist(name):
+ raw_headers.append(
+ (name.encode("latin-1"), value.encode("latin-1"))
+ )
+ await self._send(
+ {"type": "http.response.push", "path": path, "headers": raw_headers}
+ ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/responses.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/responses.py new file mode 100644 index 00000000..bc73cb15 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/responses.py @@ -0,0 +1,349 @@ +import http.cookies
+import json
+import os
+import stat
+import sys
+import typing
+from email.utils import formatdate
+from functools import partial
+from mimetypes import guess_type as mimetypes_guess_type
+from urllib.parse import quote
+
+import anyio
+
+from starlette._compat import md5_hexdigest
+from starlette.background import BackgroundTask
+from starlette.concurrency import iterate_in_threadpool
+from starlette.datastructures import URL, MutableHeaders
+from starlette.types import Receive, Scope, Send
+
+# Workaround for adding samesite support to pre 3.8 python
+http.cookies.Morsel._reserved["samesite"] = "SameSite" # type: ignore
+
+
+# Compatibility wrapper for `mimetypes.guess_type` to support `os.PathLike` on <py3.8
+def guess_type(
+ url: typing.Union[str, "os.PathLike[str]"], strict: bool = True
+) -> typing.Tuple[typing.Optional[str], typing.Optional[str]]:
+ if sys.version_info < (3, 8): # pragma: no cover
+ url = os.fspath(url)
+ return mimetypes_guess_type(url, strict)
+
+
+class Response:
+ media_type = None
+ charset = "utf-8"
+
+ def __init__(
+ self,
+ content: typing.Any = None,
+ status_code: int = 200,
+ headers: typing.Optional[typing.Mapping[str, str]] = None,
+ media_type: typing.Optional[str] = None,
+ background: typing.Optional[BackgroundTask] = None,
+ ) -> None:
+ self.status_code = status_code
+ if media_type is not None:
+ self.media_type = media_type
+ self.background = background
+ self.body = self.render(content)
+ self.init_headers(headers)
+
+ def render(self, content: typing.Any) -> bytes:
+ if content is None:
+ return b""
+ if isinstance(content, bytes):
+ return content
+ return content.encode(self.charset)
+
+ def init_headers(
+ self, headers: typing.Optional[typing.Mapping[str, str]] = None
+ ) -> None:
+ if headers is None:
+ raw_headers: typing.List[typing.Tuple[bytes, bytes]] = []
+ populate_content_length = True
+ populate_content_type = True
+ else:
+ raw_headers = [
+
(k.lower().encode("latin-1"), v.encode("latin-1")) + for k, v in headers.items() + ] + keys = [h[0] for h in raw_headers] + populate_content_length = b"content-length" not in keys + populate_content_type = b"content-type" not in keys + + body = getattr(self, "body", None) + if ( + body is not None + and populate_content_length + and not (self.status_code < 200 or self.status_code in (204, 304)) + ): + content_length = str(len(body)) + raw_headers.append((b"content-length", content_length.encode("latin-1"))) + + content_type = self.media_type + if content_type is not None and populate_content_type: + if content_type.startswith("text/"): + content_type += "; charset=" + self.charset + raw_headers.append((b"content-type", content_type.encode("latin-1"))) + + self.raw_headers = raw_headers + + @property + def headers(self) -> MutableHeaders: + if not hasattr(self, "_headers"): + self._headers = MutableHeaders(raw=self.raw_headers) + return self._headers + + def set_cookie( + self, + key: str, + value: str = "", + max_age: typing.Optional[int] = None, + expires: typing.Optional[int] = None, + path: str = "/", + domain: typing.Optional[str] = None, + secure: bool = False, + httponly: bool = False, + samesite: str = "lax", + ) -> None: + cookie: http.cookies.BaseCookie = http.cookies.SimpleCookie() + cookie[key] = value + if max_age is not None: + cookie[key]["max-age"] = max_age + if expires is not None: + cookie[key]["expires"] = expires + if path is not None: + cookie[key]["path"] = path + if domain is not None: + cookie[key]["domain"] = domain + if secure: + cookie[key]["secure"] = True + if httponly: + cookie[key]["httponly"] = True + if samesite is not None: + assert samesite.lower() in [ + "strict", + "lax", + "none", + ], "samesite must be either 'strict', 'lax' or 'none'" + cookie[key]["samesite"] = samesite + cookie_val = cookie.output(header="").strip() + self.raw_headers.append((b"set-cookie", cookie_val.encode("latin-1"))) + + def delete_cookie( + self, + key: str, + path: str = "/", + domain: typing.Optional[str] = None, + secure: bool = False, + httponly: bool = False, + samesite: str = "lax", + ) -> None: + self.set_cookie( + key, + max_age=0, + expires=0, + path=path, + domain=domain, + secure=secure, + httponly=httponly, + samesite=samesite, + ) + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + await send( + { + "type": "http.response.start", + "status": self.status_code, + "headers": self.raw_headers, + } + ) + await send({"type": "http.response.body", "body": self.body}) + + if self.background is not None: + await self.background() + + +class HTMLResponse(Response): + media_type = "text/html" + + +class PlainTextResponse(Response): + media_type = "text/plain" + + +class JSONResponse(Response): + media_type = "application/json" + + def __init__( + self, + content: typing.Any, + status_code: int = 200, + headers: typing.Optional[dict] = None, + media_type: typing.Optional[str] = None, + background: typing.Optional[BackgroundTask] = None, + ) -> None: + super().__init__(content, status_code, headers, media_type, background) + + def render(self, content: typing.Any) -> bytes: + return json.dumps( + content, + ensure_ascii=False, + allow_nan=False, + indent=None, + separators=(",", ":"), + ).encode("utf-8") + + +class RedirectResponse(Response): + def __init__( + self, + url: typing.Union[str, URL], + status_code: int = 307, + headers: typing.Optional[typing.Mapping[str, str]] = None, + background: typing.Optional[BackgroundTask] = None, + ) -> 
None: + super().__init__( + content=b"", status_code=status_code, headers=headers, background=background + ) + self.headers["location"] = quote(str(url), safe=":/%#?=@[]!$&'()*+,;") + + +class StreamingResponse(Response): + def __init__( + self, + content: typing.Any, + status_code: int = 200, + headers: typing.Optional[typing.Mapping[str, str]] = None, + media_type: typing.Optional[str] = None, + background: typing.Optional[BackgroundTask] = None, + ) -> None: + if isinstance(content, typing.AsyncIterable): + self.body_iterator = content + else: + self.body_iterator = iterate_in_threadpool(content) + self.status_code = status_code + self.media_type = self.media_type if media_type is None else media_type + self.background = background + self.init_headers(headers) + + async def listen_for_disconnect(self, receive: Receive) -> None: + while True: + message = await receive() + if message["type"] == "http.disconnect": + break + + async def stream_response(self, send: Send) -> None: + await send( + { + "type": "http.response.start", + "status": self.status_code, + "headers": self.raw_headers, + } + ) + async for chunk in self.body_iterator: + if not isinstance(chunk, bytes): + chunk = chunk.encode(self.charset) + await send({"type": "http.response.body", "body": chunk, "more_body": True}) + + await send({"type": "http.response.body", "body": b"", "more_body": False}) + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + async with anyio.create_task_group() as task_group: + + async def wrap(func: typing.Callable[[], typing.Coroutine]) -> None: + await func() + task_group.cancel_scope.cancel() + + task_group.start_soon(wrap, partial(self.stream_response, send)) + await wrap(partial(self.listen_for_disconnect, receive)) + + if self.background is not None: + await self.background() + + +class FileResponse(Response): + chunk_size = 64 * 1024 + + def __init__( + self, + path: typing.Union[str, "os.PathLike[str]"], + status_code: int = 200, + headers: typing.Optional[typing.Mapping[str, str]] = None, + media_type: typing.Optional[str] = None, + background: typing.Optional[BackgroundTask] = None, + filename: typing.Optional[str] = None, + stat_result: typing.Optional[os.stat_result] = None, + method: typing.Optional[str] = None, + content_disposition_type: str = "attachment", + ) -> None: + self.path = path + self.status_code = status_code + self.filename = filename + self.send_header_only = method is not None and method.upper() == "HEAD" + if media_type is None: + media_type = guess_type(filename or path)[0] or "text/plain" + self.media_type = media_type + self.background = background + self.init_headers(headers) + if self.filename is not None: + content_disposition_filename = quote(self.filename) + if content_disposition_filename != self.filename: + content_disposition = "{}; filename*=utf-8''{}".format( + content_disposition_type, content_disposition_filename + ) + else: + content_disposition = '{}; filename="{}"'.format( + content_disposition_type, self.filename + ) + self.headers.setdefault("content-disposition", content_disposition) + self.stat_result = stat_result + if stat_result is not None: + self.set_stat_headers(stat_result) + + def set_stat_headers(self, stat_result: os.stat_result) -> None: + content_length = str(stat_result.st_size) + last_modified = formatdate(stat_result.st_mtime, usegmt=True) + etag_base = str(stat_result.st_mtime) + "-" + str(stat_result.st_size) + etag = md5_hexdigest(etag_base.encode(), usedforsecurity=False) + + 
self.headers.setdefault("content-length", content_length) + self.headers.setdefault("last-modified", last_modified) + self.headers.setdefault("etag", etag) + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if self.stat_result is None: + try: + stat_result = await anyio.to_thread.run_sync(os.stat, self.path) + self.set_stat_headers(stat_result) + except FileNotFoundError: + raise RuntimeError(f"File at path {self.path} does not exist.") + else: + mode = stat_result.st_mode + if not stat.S_ISREG(mode): + raise RuntimeError(f"File at path {self.path} is not a file.") + await send( + { + "type": "http.response.start", + "status": self.status_code, + "headers": self.raw_headers, + } + ) + if self.send_header_only: + await send({"type": "http.response.body", "body": b"", "more_body": False}) + else: + async with await anyio.open_file(self.path, mode="rb") as file: + more_body = True + while more_body: + chunk = await file.read(self.chunk_size) + more_body = len(chunk) == self.chunk_size + await send( + { + "type": "http.response.body", + "body": chunk, + "more_body": more_body, + } + ) + if self.background is not None: + await self.background() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/routing.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/routing.py new file mode 100644 index 00000000..7b7dc9b7 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/routing.py @@ -0,0 +1,832 @@ +import asyncio +import contextlib +import functools +import inspect +import re +import sys +import traceback +import types +import typing +import warnings +from enum import Enum + +from starlette.concurrency import run_in_threadpool +from starlette.convertors import CONVERTOR_TYPES, Convertor +from starlette.datastructures import URL, Headers, URLPath +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.responses import PlainTextResponse, RedirectResponse +from starlette.types import ASGIApp, Receive, Scope, Send +from starlette.websockets import WebSocket, WebSocketClose + +if sys.version_info >= (3, 7): + from contextlib import asynccontextmanager # pragma: no cover +else: + from contextlib2 import asynccontextmanager # pragma: no cover + + +class NoMatchFound(Exception): + """ + Raised by `.url_for(name, **path_params)` and `.url_path_for(name, **path_params)` + if no matching route exists. + """ + + def __init__(self, name: str, path_params: typing.Dict[str, typing.Any]) -> None: + params = ", ".join(list(path_params.keys())) + super().__init__(f'No route exists for name "{name}" and params "{params}".') + + +class Match(Enum): + NONE = 0 + PARTIAL = 1 + FULL = 2 + + +def iscoroutinefunction_or_partial(obj: typing.Any) -> bool: + """ + Correctly determines if an object is a coroutine function, + including those wrapped in functools.partial objects. + """ + while isinstance(obj, functools.partial): + obj = obj.func + return inspect.iscoroutinefunction(obj) + + +def request_response(func: typing.Callable) -> ASGIApp: + """ + Takes a function or coroutine `func(request) -> response`, + and returns an ASGI application. 
+ """ + is_coroutine = iscoroutinefunction_or_partial(func) + + async def app(scope: Scope, receive: Receive, send: Send) -> None: + request = Request(scope, receive=receive, send=send) + if is_coroutine: + response = await func(request) + else: + response = await run_in_threadpool(func, request) + await response(scope, receive, send) + + return app + + +def websocket_session(func: typing.Callable) -> ASGIApp: + """ + Takes a coroutine `func(session)`, and returns an ASGI application. + """ + # assert asyncio.iscoroutinefunction(func), "WebSocket endpoints must be async" + + async def app(scope: Scope, receive: Receive, send: Send) -> None: + session = WebSocket(scope, receive=receive, send=send) + await func(session) + + return app + + +def get_name(endpoint: typing.Callable) -> str: + if inspect.isroutine(endpoint) or inspect.isclass(endpoint): + return endpoint.__name__ + return endpoint.__class__.__name__ + + +def replace_params( + path: str, + param_convertors: typing.Dict[str, Convertor], + path_params: typing.Dict[str, str], +) -> typing.Tuple[str, dict]: + for key, value in list(path_params.items()): + if "{" + key + "}" in path: + convertor = param_convertors[key] + value = convertor.to_string(value) + path = path.replace("{" + key + "}", value) + path_params.pop(key) + return path, path_params + + +# Match parameters in URL paths, eg. '{param}', and '{param:int}' +PARAM_REGEX = re.compile("{([a-zA-Z_][a-zA-Z0-9_]*)(:[a-zA-Z_][a-zA-Z0-9_]*)?}") + + +def compile_path( + path: str, +) -> typing.Tuple[typing.Pattern, str, typing.Dict[str, Convertor]]: + """ + Given a path string, like: "/{username:str}", return a three-tuple + of (regex, format, {param_name:convertor}). + + regex: "/(?P[^/]+)" + format: "/{username}" + convertors: {"username": StringConvertor()} + """ + path_regex = "^" + path_format = "" + duplicated_params = set() + + idx = 0 + param_convertors = {} + for match in PARAM_REGEX.finditer(path): + param_name, convertor_type = match.groups("str") + convertor_type = convertor_type.lstrip(":") + assert ( + convertor_type in CONVERTOR_TYPES + ), f"Unknown path convertor '{convertor_type}'" + convertor = CONVERTOR_TYPES[convertor_type] + + path_regex += re.escape(path[idx : match.start()]) + path_regex += f"(?P<{param_name}>{convertor.regex})" + + path_format += path[idx : match.start()] + path_format += "{%s}" % param_name + + if param_name in param_convertors: + duplicated_params.add(param_name) + + param_convertors[param_name] = convertor + + idx = match.end() + + if duplicated_params: + names = ", ".join(sorted(duplicated_params)) + ending = "s" if len(duplicated_params) > 1 else "" + raise ValueError(f"Duplicated param name{ending} {names} at path {path}") + + path_regex += re.escape(path[idx:].split(":")[0]) + "$" + path_format += path[idx:] + + return re.compile(path_regex), path_format, param_convertors + + +class BaseRoute: + def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]: + raise NotImplementedError() # pragma: no cover + + def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath: + raise NotImplementedError() # pragma: no cover + + async def handle(self, scope: Scope, receive: Receive, send: Send) -> None: + raise NotImplementedError() # pragma: no cover + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + """ + A route may be used in isolation as a stand-alone ASGI app. 
+ This is a somewhat contrived case, as they'll almost always be used + within a Router, but could be useful for some tooling and minimal apps. + """ + match, child_scope = self.matches(scope) + if match == Match.NONE: + if scope["type"] == "http": + response = PlainTextResponse("Not Found", status_code=404) + await response(scope, receive, send) + elif scope["type"] == "websocket": + websocket_close = WebSocketClose() + await websocket_close(scope, receive, send) + return + + scope.update(child_scope) + await self.handle(scope, receive, send) + + +class Route(BaseRoute): + def __init__( + self, + path: str, + endpoint: typing.Callable, + *, + methods: typing.Optional[typing.List[str]] = None, + name: typing.Optional[str] = None, + include_in_schema: bool = True, + ) -> None: + assert path.startswith("/"), "Routed paths must start with '/'" + self.path = path + self.endpoint = endpoint + self.name = get_name(endpoint) if name is None else name + self.include_in_schema = include_in_schema + + endpoint_handler = endpoint + while isinstance(endpoint_handler, functools.partial): + endpoint_handler = endpoint_handler.func + if inspect.isfunction(endpoint_handler) or inspect.ismethod(endpoint_handler): + # Endpoint is function or method. Treat it as `func(request) -> response`. + self.app = request_response(endpoint) + if methods is None: + methods = ["GET"] + else: + # Endpoint is a class. Treat it as ASGI. + self.app = endpoint + + if methods is None: + self.methods = None + else: + self.methods = {method.upper() for method in methods} + if "GET" in self.methods: + self.methods.add("HEAD") + + self.path_regex, self.path_format, self.param_convertors = compile_path(path) + + def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]: + if scope["type"] == "http": + match = self.path_regex.match(scope["path"]) + if match: + matched_params = match.groupdict() + for key, value in matched_params.items(): + matched_params[key] = self.param_convertors[key].convert(value) + path_params = dict(scope.get("path_params", {})) + path_params.update(matched_params) + child_scope = {"endpoint": self.endpoint, "path_params": path_params} + if self.methods and scope["method"] not in self.methods: + return Match.PARTIAL, child_scope + else: + return Match.FULL, child_scope + return Match.NONE, {} + + def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath: + seen_params = set(path_params.keys()) + expected_params = set(self.param_convertors.keys()) + + if name != self.name or seen_params != expected_params: + raise NoMatchFound(name, path_params) + + path, remaining_params = replace_params( + self.path_format, self.param_convertors, path_params + ) + assert not remaining_params + return URLPath(path=path, protocol="http") + + async def handle(self, scope: Scope, receive: Receive, send: Send) -> None: + if self.methods and scope["method"] not in self.methods: + headers = {"Allow": ", ".join(self.methods)} + if "app" in scope: + raise HTTPException(status_code=405, headers=headers) + else: + response = PlainTextResponse( + "Method Not Allowed", status_code=405, headers=headers + ) + await response(scope, receive, send) + else: + await self.app(scope, receive, send) + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, Route) + and self.path == other.path + and self.endpoint == other.endpoint + and self.methods == other.methods + ) + + +class WebSocketRoute(BaseRoute): + def __init__( + self, path: str, endpoint: typing.Callable, *, name: typing.Optional[str] = None + ) 
-> None: + assert path.startswith("/"), "Routed paths must start with '/'" + self.path = path + self.endpoint = endpoint + self.name = get_name(endpoint) if name is None else name + + endpoint_handler = endpoint + while isinstance(endpoint_handler, functools.partial): + endpoint_handler = endpoint_handler.func + if inspect.isfunction(endpoint_handler) or inspect.ismethod(endpoint_handler): + # Endpoint is function or method. Treat it as `func(websocket)`. + self.app = websocket_session(endpoint) + else: + # Endpoint is a class. Treat it as ASGI. + self.app = endpoint + + self.path_regex, self.path_format, self.param_convertors = compile_path(path) + + def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]: + if scope["type"] == "websocket": + match = self.path_regex.match(scope["path"]) + if match: + matched_params = match.groupdict() + for key, value in matched_params.items(): + matched_params[key] = self.param_convertors[key].convert(value) + path_params = dict(scope.get("path_params", {})) + path_params.update(matched_params) + child_scope = {"endpoint": self.endpoint, "path_params": path_params} + return Match.FULL, child_scope + return Match.NONE, {} + + def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath: + seen_params = set(path_params.keys()) + expected_params = set(self.param_convertors.keys()) + + if name != self.name or seen_params != expected_params: + raise NoMatchFound(name, path_params) + + path, remaining_params = replace_params( + self.path_format, self.param_convertors, path_params + ) + assert not remaining_params + return URLPath(path=path, protocol="websocket") + + async def handle(self, scope: Scope, receive: Receive, send: Send) -> None: + await self.app(scope, receive, send) + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, WebSocketRoute) + and self.path == other.path + and self.endpoint == other.endpoint + ) + + +class Mount(BaseRoute): + def __init__( + self, + path: str, + app: typing.Optional[ASGIApp] = None, + routes: typing.Optional[typing.Sequence[BaseRoute]] = None, + name: typing.Optional[str] = None, + ) -> None: + assert path == "" or path.startswith("/"), "Routed paths must start with '/'" + assert ( + app is not None or routes is not None + ), "Either 'app=...', or 'routes=' must be specified" + self.path = path.rstrip("/") + if app is not None: + self.app: ASGIApp = app + else: + self.app = Router(routes=routes) + self.name = name + self.path_regex, self.path_format, self.param_convertors = compile_path( + self.path + "/{path:path}" + ) + + @property + def routes(self) -> typing.List[BaseRoute]: + return getattr(self.app, "routes", []) + + def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]: + if scope["type"] in ("http", "websocket"): + path = scope["path"] + match = self.path_regex.match(path) + if match: + matched_params = match.groupdict() + for key, value in matched_params.items(): + matched_params[key] = self.param_convertors[key].convert(value) + remaining_path = "/" + matched_params.pop("path") + matched_path = path[: -len(remaining_path)] + path_params = dict(scope.get("path_params", {})) + path_params.update(matched_params) + root_path = scope.get("root_path", "") + child_scope = { + "path_params": path_params, + "app_root_path": scope.get("app_root_path", root_path), + "root_path": root_path + matched_path, + "path": remaining_path, + "endpoint": self.app, + } + return Match.FULL, child_scope + return Match.NONE, {} + + def url_path_for(self, name: str, **path_params: 
typing.Any) -> URLPath:
+ if self.name is not None and name == self.name and "path" in path_params:
+ # 'name' matches "<mount_name>".
+ path_params["path"] = path_params["path"].lstrip("/")
+ path, remaining_params = replace_params(
+ self.path_format, self.param_convertors, path_params
+ )
+ if not remaining_params:
+ return URLPath(path=path)
+ elif self.name is None or name.startswith(self.name + ":"):
+ if self.name is None:
+ # No mount name.
+ remaining_name = name
+ else:
+ # 'name' matches "<mount_name>:<child_name>".
+ remaining_name = name[len(self.name) + 1 :]
+ path_kwarg = path_params.get("path")
+ path_params["path"] = ""
+ path_prefix, remaining_params = replace_params(
+ self.path_format, self.param_convertors, path_params
+ )
+ if path_kwarg is not None:
+ remaining_params["path"] = path_kwarg
+ for route in self.routes or []:
+ try:
+ url = route.url_path_for(remaining_name, **remaining_params)
+ return URLPath(
+ path=path_prefix.rstrip("/") + str(url), protocol=url.protocol
+ )
+ except NoMatchFound:
+ pass
+ raise NoMatchFound(name, path_params)
+
+ async def handle(self, scope: Scope, receive: Receive, send: Send) -> None:
+ await self.app(scope, receive, send)
+
+ def __eq__(self, other: typing.Any) -> bool:
+ return (
+ isinstance(other, Mount)
+ and self.path == other.path
+ and self.app == other.app
+ )
+
+
+class Host(BaseRoute):
+ def __init__(
+ self, host: str, app: ASGIApp, name: typing.Optional[str] = None
+ ) -> None:
+ self.host = host
+ self.app = app
+ self.name = name
+ self.host_regex, self.host_format, self.param_convertors = compile_path(host)
+
+ @property
+ def routes(self) -> typing.List[BaseRoute]:
+ return getattr(self.app, "routes", [])
+
+ def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]:
+ if scope["type"] in ("http", "websocket"):
+ headers = Headers(scope=scope)
+ host = headers.get("host", "").split(":")[0]
+ match = self.host_regex.match(host)
+ if match:
+ matched_params = match.groupdict()
+ for key, value in matched_params.items():
+ matched_params[key] = self.param_convertors[key].convert(value)
+ path_params = dict(scope.get("path_params", {}))
+ path_params.update(matched_params)
+ child_scope = {"path_params": path_params, "endpoint": self.app}
+ return Match.FULL, child_scope
+ return Match.NONE, {}
+
+ def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
+ if self.name is not None and name == self.name and "path" in path_params:
+ # 'name' matches "<mount_name>".
+ path = path_params.pop("path")
+ host, remaining_params = replace_params(
+ self.host_format, self.param_convertors, path_params
+ )
+ if not remaining_params:
+ return URLPath(path=path, host=host)
+ elif self.name is None or name.startswith(self.name + ":"):
+ if self.name is None:
+ # No mount name.
+ remaining_name = name
+ else:
+ # 'name' matches "<mount_name>:<child_name>".
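# Illustrative (names assumed): with self.name == "api", a lookup such as
# url_path_for("api:user_detail", ...) leaves remaining_name == "user_detail"
# to be resolved against the nested routes below.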
+ remaining_name = name[len(self.name) + 1 :] + host, remaining_params = replace_params( + self.host_format, self.param_convertors, path_params + ) + for route in self.routes or []: + try: + url = route.url_path_for(remaining_name, **remaining_params) + return URLPath(path=str(url), protocol=url.protocol, host=host) + except NoMatchFound: + pass + raise NoMatchFound(name, path_params) + + async def handle(self, scope: Scope, receive: Receive, send: Send) -> None: + await self.app(scope, receive, send) + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, Host) + and self.host == other.host + and self.app == other.app + ) + + +_T = typing.TypeVar("_T") + + +class _AsyncLiftContextManager(typing.AsyncContextManager[_T]): + def __init__(self, cm: typing.ContextManager[_T]): + self._cm = cm + + async def __aenter__(self) -> _T: + return self._cm.__enter__() + + async def __aexit__( + self, + exc_type: typing.Optional[typing.Type[BaseException]], + exc_value: typing.Optional[BaseException], + traceback: typing.Optional[types.TracebackType], + ) -> typing.Optional[bool]: + return self._cm.__exit__(exc_type, exc_value, traceback) + + +def _wrap_gen_lifespan_context( + lifespan_context: typing.Callable[[typing.Any], typing.Generator] +) -> typing.Callable[[typing.Any], typing.AsyncContextManager]: + cmgr = contextlib.contextmanager(lifespan_context) + + @functools.wraps(cmgr) + def wrapper(app: typing.Any) -> _AsyncLiftContextManager: + return _AsyncLiftContextManager(cmgr(app)) + + return wrapper + + +class _DefaultLifespan: + def __init__(self, router: "Router"): + self._router = router + + async def __aenter__(self) -> None: + await self._router.startup() + + async def __aexit__(self, *exc_info: object) -> None: + await self._router.shutdown() + + def __call__(self: _T, app: object) -> _T: + return self + + +class Router: + def __init__( + self, + routes: typing.Optional[typing.Sequence[BaseRoute]] = None, + redirect_slashes: bool = True, + default: typing.Optional[ASGIApp] = None, + on_startup: typing.Optional[typing.Sequence[typing.Callable]] = None, + on_shutdown: typing.Optional[typing.Sequence[typing.Callable]] = None, + lifespan: typing.Optional[ + typing.Callable[[typing.Any], typing.AsyncContextManager] + ] = None, + ) -> None: + self.routes = [] if routes is None else list(routes) + self.redirect_slashes = redirect_slashes + self.default = self.not_found if default is None else default + self.on_startup = [] if on_startup is None else list(on_startup) + self.on_shutdown = [] if on_shutdown is None else list(on_shutdown) + + if lifespan is None: + self.lifespan_context: typing.Callable[ + [typing.Any], typing.AsyncContextManager + ] = _DefaultLifespan(self) + + elif inspect.isasyncgenfunction(lifespan): + warnings.warn( + "async generator function lifespans are deprecated, " + "use an @contextlib.asynccontextmanager function instead", + DeprecationWarning, + ) + self.lifespan_context = asynccontextmanager( + lifespan, # type: ignore[arg-type] + ) + elif inspect.isgeneratorfunction(lifespan): + warnings.warn( + "generator function lifespans are deprecated, " + "use an @contextlib.asynccontextmanager function instead", + DeprecationWarning, + ) + self.lifespan_context = _wrap_gen_lifespan_context( + lifespan, # type: ignore[arg-type] + ) + else: + self.lifespan_context = lifespan + + async def not_found(self, scope: Scope, receive: Receive, send: Send) -> None: + if scope["type"] == "websocket": + websocket_close = WebSocketClose() + await websocket_close(scope, 
receive, send) + return + + # If we're running inside a starlette application then raise an + # exception, so that the configurable exception handler can deal with + # returning the response. For plain ASGI apps, just return the response. + if "app" in scope: + raise HTTPException(status_code=404) + else: + response = PlainTextResponse("Not Found", status_code=404) + await response(scope, receive, send) + + def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath: + for route in self.routes: + try: + return route.url_path_for(name, **path_params) + except NoMatchFound: + pass + raise NoMatchFound(name, path_params) + + async def startup(self) -> None: + """ + Run any `.on_startup` event handlers. + """ + for handler in self.on_startup: + if asyncio.iscoroutinefunction(handler): + await handler() + else: + handler() + + async def shutdown(self) -> None: + """ + Run any `.on_shutdown` event handlers. + """ + for handler in self.on_shutdown: + if asyncio.iscoroutinefunction(handler): + await handler() + else: + handler() + + async def lifespan(self, scope: Scope, receive: Receive, send: Send) -> None: + """ + Handle ASGI lifespan messages, which allows us to manage application + startup and shutdown events. + """ + started = False + app = scope.get("app") + await receive() + try: + async with self.lifespan_context(app): + await send({"type": "lifespan.startup.complete"}) + started = True + await receive() + except BaseException: + exc_text = traceback.format_exc() + if started: + await send({"type": "lifespan.shutdown.failed", "message": exc_text}) + else: + await send({"type": "lifespan.startup.failed", "message": exc_text}) + raise + else: + await send({"type": "lifespan.shutdown.complete"}) + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + """ + The main entry point to the Router class. + """ + assert scope["type"] in ("http", "websocket", "lifespan") + + if "router" not in scope: + scope["router"] = self + + if scope["type"] == "lifespan": + await self.lifespan(scope, receive, send) + return + + partial = None + + for route in self.routes: + # Determine if any route matches the incoming scope, + # and hand over to the matching route if found. + match, child_scope = route.matches(scope) + if match == Match.FULL: + scope.update(child_scope) + await route.handle(scope, receive, send) + return + elif match == Match.PARTIAL and partial is None: + partial = route + partial_scope = child_scope + + if partial is not None: + #  Handle partial matches. These are cases where an endpoint is + # able to handle the request, but is not a preferred option. + # We use this in particular to deal with "405 Method Not Allowed". 
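# Illustrative: a Route declared with methods=["GET"] still PARTIAL-matches
# a POST to the same path, and handing over to it here is what produces the
# 405 response (with an Allow header) rather than a generic 404.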
+ scope.update(partial_scope) + await partial.handle(scope, receive, send) + return + + if scope["type"] == "http" and self.redirect_slashes and scope["path"] != "/": + redirect_scope = dict(scope) + if scope["path"].endswith("/"): + redirect_scope["path"] = redirect_scope["path"].rstrip("/") + else: + redirect_scope["path"] = redirect_scope["path"] + "/" + + for route in self.routes: + match, child_scope = route.matches(redirect_scope) + if match != Match.NONE: + redirect_url = URL(scope=redirect_scope) + response = RedirectResponse(url=str(redirect_url)) + await response(scope, receive, send) + return + + await self.default(scope, receive, send) + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, Router) and self.routes == other.routes + + # The following usages are now discouraged in favour of configuration + #  during Router.__init__(...) + def mount( + self, path: str, app: ASGIApp, name: typing.Optional[str] = None + ) -> None: # pragma: nocover + """ + We no longer document this API, and its usage is discouraged. + Instead you should use the following approach: + + routes = [ + Mount(path, ...), + ... + ] + + app = Starlette(routes=routes) + """ + + route = Mount(path, app=app, name=name) + self.routes.append(route) + + def host( + self, host: str, app: ASGIApp, name: typing.Optional[str] = None + ) -> None: # pragma: no cover + """ + We no longer document this API, and its usage is discouraged. + Instead you should use the following approach: + + routes = [ + Host(path, ...), + ... + ] + + app = Starlette(routes=routes) + """ + + route = Host(host, app=app, name=name) + self.routes.append(route) + + def add_route( + self, + path: str, + endpoint: typing.Callable, + methods: typing.Optional[typing.List[str]] = None, + name: typing.Optional[str] = None, + include_in_schema: bool = True, + ) -> None: # pragma: nocover + route = Route( + path, + endpoint=endpoint, + methods=methods, + name=name, + include_in_schema=include_in_schema, + ) + self.routes.append(route) + + def add_websocket_route( + self, path: str, endpoint: typing.Callable, name: typing.Optional[str] = None + ) -> None: # pragma: no cover + route = WebSocketRoute(path, endpoint=endpoint, name=name) + self.routes.append(route) + + def route( + self, + path: str, + methods: typing.Optional[typing.List[str]] = None, + name: typing.Optional[str] = None, + include_in_schema: bool = True, + ) -> typing.Callable: # pragma: nocover + """ + We no longer document this decorator style API, and its usage is discouraged. + Instead you should use the following approach: + + routes = [ + Route(path, endpoint=..., ...), + ... + ] + + app = Starlette(routes=routes) + """ + + def decorator(func: typing.Callable) -> typing.Callable: + self.add_route( + path, + func, + methods=methods, + name=name, + include_in_schema=include_in_schema, + ) + return func + + return decorator + + def websocket_route( + self, path: str, name: typing.Optional[str] = None + ) -> typing.Callable: # pragma: nocover + """ + We no longer document this decorator style API, and its usage is discouraged. + Instead you should use the following approach: + + routes = [ + WebSocketRoute(path, endpoint=..., ...), + ... 
+ ] + + app = Starlette(routes=routes) + """ + + def decorator(func: typing.Callable) -> typing.Callable: + self.add_websocket_route(path, func, name=name) + return func + + return decorator + + def add_event_handler( + self, event_type: str, func: typing.Callable + ) -> None: # pragma: no cover + assert event_type in ("startup", "shutdown") + + if event_type == "startup": + self.on_startup.append(func) + else: + self.on_shutdown.append(func) + + def on_event(self, event_type: str) -> typing.Callable: # pragma: nocover + def decorator(func: typing.Callable) -> typing.Callable: + self.add_event_handler(event_type, func) + return func + + return decorator diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/schemas.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/schemas.py new file mode 100644 index 00000000..6ca764fd --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/schemas.py @@ -0,0 +1,135 @@ +import inspect +import typing + +from starlette.requests import Request +from starlette.responses import Response +from starlette.routing import BaseRoute, Mount, Route + +try: + import yaml +except ImportError: # pragma: nocover + yaml = None # type: ignore + + +class OpenAPIResponse(Response): + media_type = "application/vnd.oai.openapi" + + def render(self, content: typing.Any) -> bytes: + assert yaml is not None, "`pyyaml` must be installed to use OpenAPIResponse." + assert isinstance( + content, dict + ), "The schema passed to OpenAPIResponse should be a dictionary." + return yaml.dump(content, default_flow_style=False).encode("utf-8") + + +class EndpointInfo(typing.NamedTuple): + path: str + http_method: str + func: typing.Callable + + +class BaseSchemaGenerator: + def get_schema(self, routes: typing.List[BaseRoute]) -> dict: + raise NotImplementedError() # pragma: no cover + + def get_endpoints( + self, routes: typing.List[BaseRoute] + ) -> typing.List[EndpointInfo]: + """ + Given the routes, yields the following information: + + - path + eg: /users/ + - http_method + one of 'get', 'post', 'put', 'patch', 'delete', 'options' + - func + method ready to extract the docstring + """ + endpoints_info: list = [] + + for route in routes: + if isinstance(route, Mount): + routes = route.routes or [] + sub_endpoints = [ + EndpointInfo( + path="".join((route.path, sub_endpoint.path)), + http_method=sub_endpoint.http_method, + func=sub_endpoint.func, + ) + for sub_endpoint in self.get_endpoints(routes) + ] + endpoints_info.extend(sub_endpoints) + + elif not isinstance(route, Route) or not route.include_in_schema: + continue + + elif inspect.isfunction(route.endpoint) or inspect.ismethod(route.endpoint): + for method in route.methods or ["GET"]: + if method == "HEAD": + continue + endpoints_info.append( + EndpointInfo(route.path, method.lower(), route.endpoint) + ) + else: + for method in ["get", "post", "put", "patch", "delete", "options"]: + if not hasattr(route.endpoint, method): + continue + func = getattr(route.endpoint, method) + endpoints_info.append( + EndpointInfo(route.path, method.lower(), func) + ) + + return endpoints_info + + def parse_docstring(self, func_or_method: typing.Callable) -> dict: + """ + Given a function, parse the docstring as YAML and return a dictionary of info. + """ + docstring = func_or_method.__doc__ + if not docstring: + return {} + + assert yaml is not None, "`pyyaml` must be installed to use parse_docstring." 
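# An endpoint docstring following this convention might look like
# (hypothetical endpoint):
#
#     def list_users(request):
#         """
#         Return a list of users.
#         ---
#         responses:
#           200:
#             description: A list of users.
#         """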
+ + # We support having regular docstrings before the schema + # definition. Here we return just the schema part from + # the docstring. + docstring = docstring.split("---")[-1] + + parsed = yaml.safe_load(docstring) + + if not isinstance(parsed, dict): + # A regular docstring (not yaml formatted) can return + # a simple string here, which wouldn't follow the schema. + return {} + + return parsed + + def OpenAPIResponse(self, request: Request) -> Response: + routes = request.app.routes + schema = self.get_schema(routes=routes) + return OpenAPIResponse(schema) + + +class SchemaGenerator(BaseSchemaGenerator): + def __init__(self, base_schema: dict) -> None: + self.base_schema = base_schema + + def get_schema(self, routes: typing.List[BaseRoute]) -> dict: + schema = dict(self.base_schema) + schema.setdefault("paths", {}) + endpoints_info = self.get_endpoints(routes) + + for endpoint in endpoints_info: + + parsed = self.parse_docstring(endpoint.func) + + if not parsed: + continue + + if endpoint.path not in schema["paths"]: + schema["paths"][endpoint.path] = {} + + schema["paths"][endpoint.path][endpoint.http_method] = parsed + + return schema diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/staticfiles.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/staticfiles.py new file mode 100644 index 00000000..d09630f3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/staticfiles.py @@ -0,0 +1,240 @@ +import importlib.util +import os +import stat +import typing +from email.utils import parsedate + +import anyio + +from starlette.datastructures import URL, Headers +from starlette.exceptions import HTTPException +from starlette.responses import FileResponse, RedirectResponse, Response +from starlette.types import Receive, Scope, Send + +PathLike = typing.Union[str, "os.PathLike[str]"] + + +class NotModifiedResponse(Response): + NOT_MODIFIED_HEADERS = ( + "cache-control", + "content-location", + "date", + "etag", + "expires", + "vary", + ) + + def __init__(self, headers: Headers): + super().__init__( + status_code=304, + headers={ + name: value + for name, value in headers.items() + if name in self.NOT_MODIFIED_HEADERS + }, + ) + + +class StaticFiles: + def __init__( + self, + *, + directory: typing.Optional[PathLike] = None, + packages: typing.Optional[ + typing.List[typing.Union[str, typing.Tuple[str, str]]] + ] = None, + html: bool = False, + check_dir: bool = True, + ) -> None: + self.directory = directory + self.packages = packages + self.all_directories = self.get_directories(directory, packages) + self.html = html + self.config_checked = False + if check_dir and directory is not None and not os.path.isdir(directory): + raise RuntimeError(f"Directory '{directory}' does not exist") + + def get_directories( + self, + directory: typing.Optional[PathLike] = None, + packages: typing.Optional[ + typing.List[typing.Union[str, typing.Tuple[str, str]]] + ] = None, + ) -> typing.List[PathLike]: + """ + Given `directory` and `packages` arguments, return a list of all the + directories that should be used for serving static files from. + """ + directories = [] + if directory is not None: + directories.append(directory) + + for package in packages or []: + if isinstance(package, tuple): + package, statics_dir = package + else: + statics_dir = "statics" + spec = importlib.util.find_spec(package) + assert spec is not None, f"Package {package!r} could not be found." 
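+            # find_spec() locates the installed package; spec.origin points at
+            # its __init__.py, so "<origin>/../<statics_dir>" resolves to a
+            # directory shipped inside the package. Illustrative call (assumed
+            # package name):
+            #
+            #     StaticFiles(packages=[("bootstrap4", "statics")])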
+ assert spec.origin is not None, f"Package {package!r} could not be found." + package_directory = os.path.normpath( + os.path.join(spec.origin, "..", statics_dir) + ) + assert os.path.isdir( + package_directory + ), f"Directory '{statics_dir!r}' in package {package!r} could not be found." + directories.append(package_directory) + + return directories + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + """ + The ASGI entry point. + """ + assert scope["type"] == "http" + + if not self.config_checked: + await self.check_config() + self.config_checked = True + + path = self.get_path(scope) + response = await self.get_response(path, scope) + await response(scope, receive, send) + + def get_path(self, scope: Scope) -> str: + """ + Given the ASGI scope, return the `path` string to serve up, + with OS specific path separators, and any '..', '.' components removed. + """ + return os.path.normpath(os.path.join(*scope["path"].split("/"))) + + async def get_response(self, path: str, scope: Scope) -> Response: + """ + Returns an HTTP response, given the incoming path, method and request headers. + """ + if scope["method"] not in ("GET", "HEAD"): + raise HTTPException(status_code=405) + + try: + full_path, stat_result = await anyio.to_thread.run_sync( + self.lookup_path, path + ) + except PermissionError: + raise HTTPException(status_code=401) + except OSError: + raise + + if stat_result and stat.S_ISREG(stat_result.st_mode): + # We have a static file to serve. + return self.file_response(full_path, stat_result, scope) + + elif stat_result and stat.S_ISDIR(stat_result.st_mode) and self.html: + # We're in HTML mode, and have got a directory URL. + # Check if we have 'index.html' file to serve. + index_path = os.path.join(path, "index.html") + full_path, stat_result = await anyio.to_thread.run_sync( + self.lookup_path, index_path + ) + if stat_result is not None and stat.S_ISREG(stat_result.st_mode): + if not scope["path"].endswith("/"): + # Directory URLs should redirect to always end in "/". + url = URL(scope=scope) + url = url.replace(path=url.path + "/") + return RedirectResponse(url=url) + return self.file_response(full_path, stat_result, scope) + + if self.html: + # Check for '404.html' if we're in HTML mode. + full_path, stat_result = await anyio.to_thread.run_sync( + self.lookup_path, "404.html" + ) + if stat_result and stat.S_ISREG(stat_result.st_mode): + return FileResponse( + full_path, + stat_result=stat_result, + method=scope["method"], + status_code=404, + ) + raise HTTPException(status_code=404) + + def lookup_path( + self, path: str + ) -> typing.Tuple[str, typing.Optional[os.stat_result]]: + for directory in self.all_directories: + full_path = os.path.realpath(os.path.join(directory, path)) + directory = os.path.realpath(directory) + if os.path.commonprefix([full_path, directory]) != directory: + # Don't allow misbehaving clients to break out of the static files + # directory. 
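+                # e.g. a crafted "../" path or a symlink that resolves outside
+                # the directory's realpath is skipped here (illustrative case),
+                # so unmatched lookups ultimately fall through to a 404.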
+ continue + try: + return full_path, os.stat(full_path) + except (FileNotFoundError, NotADirectoryError): + continue + return "", None + + def file_response( + self, + full_path: PathLike, + stat_result: os.stat_result, + scope: Scope, + status_code: int = 200, + ) -> Response: + method = scope["method"] + request_headers = Headers(scope=scope) + + response = FileResponse( + full_path, status_code=status_code, stat_result=stat_result, method=method + ) + if self.is_not_modified(response.headers, request_headers): + return NotModifiedResponse(response.headers) + return response + + async def check_config(self) -> None: + """ + Perform a one-off configuration check that StaticFiles is actually + pointed at a directory, so that we can raise loud errors rather than + just returning 404 responses. + """ + if self.directory is None: + return + + try: + stat_result = await anyio.to_thread.run_sync(os.stat, self.directory) + except FileNotFoundError: + raise RuntimeError( + f"StaticFiles directory '{self.directory}' does not exist." + ) + if not (stat.S_ISDIR(stat_result.st_mode) or stat.S_ISLNK(stat_result.st_mode)): + raise RuntimeError( + f"StaticFiles path '{self.directory}' is not a directory." + ) + + def is_not_modified( + self, response_headers: Headers, request_headers: Headers + ) -> bool: + """ + Given the request and response headers, return `True` if an HTTP + "Not Modified" response could be returned instead. + """ + try: + if_none_match = request_headers["if-none-match"] + etag = response_headers["etag"] + if if_none_match == etag: + return True + except KeyError: + pass + + try: + if_modified_since = parsedate(request_headers["if-modified-since"]) + last_modified = parsedate(response_headers["last-modified"]) + if ( + if_modified_since is not None + and last_modified is not None + and if_modified_since >= last_modified + ): + return True + except KeyError: + pass + + return False diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/status.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/status.py new file mode 100644 index 00000000..cc52d896 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/status.py @@ -0,0 +1,206 @@ +""" +HTTP codes +See HTTP Status Code Registry: +https://www.iana.org/assignments/http-status-codes/http-status-codes.xhtml + +And RFC 2324 - https://tools.ietf.org/html/rfc2324 +""" +import sys +import warnings +from typing import List + +from starlette._pep562 import pep562 + +__all__ = ( + "HTTP_100_CONTINUE", + "HTTP_101_SWITCHING_PROTOCOLS", + "HTTP_102_PROCESSING", + "HTTP_103_EARLY_HINTS", + "HTTP_200_OK", + "HTTP_201_CREATED", + "HTTP_202_ACCEPTED", + "HTTP_203_NON_AUTHORITATIVE_INFORMATION", + "HTTP_204_NO_CONTENT", + "HTTP_205_RESET_CONTENT", + "HTTP_206_PARTIAL_CONTENT", + "HTTP_207_MULTI_STATUS", + "HTTP_208_ALREADY_REPORTED", + "HTTP_226_IM_USED", + "HTTP_300_MULTIPLE_CHOICES", + "HTTP_301_MOVED_PERMANENTLY", + "HTTP_302_FOUND", + "HTTP_303_SEE_OTHER", + "HTTP_304_NOT_MODIFIED", + "HTTP_305_USE_PROXY", + "HTTP_306_RESERVED", + "HTTP_307_TEMPORARY_REDIRECT", + "HTTP_308_PERMANENT_REDIRECT", + "HTTP_400_BAD_REQUEST", + "HTTP_401_UNAUTHORIZED", + "HTTP_402_PAYMENT_REQUIRED", + "HTTP_403_FORBIDDEN", + "HTTP_404_NOT_FOUND", + "HTTP_405_METHOD_NOT_ALLOWED", + "HTTP_406_NOT_ACCEPTABLE", + "HTTP_407_PROXY_AUTHENTICATION_REQUIRED", + "HTTP_408_REQUEST_TIMEOUT", + "HTTP_409_CONFLICT", + "HTTP_410_GONE", + "HTTP_411_LENGTH_REQUIRED", + "HTTP_412_PRECONDITION_FAILED", + 
"HTTP_413_REQUEST_ENTITY_TOO_LARGE", + "HTTP_414_REQUEST_URI_TOO_LONG", + "HTTP_415_UNSUPPORTED_MEDIA_TYPE", + "HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE", + "HTTP_417_EXPECTATION_FAILED", + "HTTP_418_IM_A_TEAPOT", + "HTTP_421_MISDIRECTED_REQUEST", + "HTTP_422_UNPROCESSABLE_ENTITY", + "HTTP_423_LOCKED", + "HTTP_424_FAILED_DEPENDENCY", + "HTTP_425_TOO_EARLY", + "HTTP_426_UPGRADE_REQUIRED", + "HTTP_428_PRECONDITION_REQUIRED", + "HTTP_429_TOO_MANY_REQUESTS", + "HTTP_431_REQUEST_HEADER_FIELDS_TOO_LARGE", + "HTTP_451_UNAVAILABLE_FOR_LEGAL_REASONS", + "HTTP_500_INTERNAL_SERVER_ERROR", + "HTTP_501_NOT_IMPLEMENTED", + "HTTP_502_BAD_GATEWAY", + "HTTP_503_SERVICE_UNAVAILABLE", + "HTTP_504_GATEWAY_TIMEOUT", + "HTTP_505_HTTP_VERSION_NOT_SUPPORTED", + "HTTP_506_VARIANT_ALSO_NEGOTIATES", + "HTTP_507_INSUFFICIENT_STORAGE", + "HTTP_508_LOOP_DETECTED", + "HTTP_510_NOT_EXTENDED", + "HTTP_511_NETWORK_AUTHENTICATION_REQUIRED", + "WS_1000_NORMAL_CLOSURE", + "WS_1001_GOING_AWAY", + "WS_1002_PROTOCOL_ERROR", + "WS_1003_UNSUPPORTED_DATA", + "WS_1005_NO_STATUS_RCVD", + "WS_1006_ABNORMAL_CLOSURE", + "WS_1007_INVALID_FRAME_PAYLOAD_DATA", + "WS_1008_POLICY_VIOLATION", + "WS_1009_MESSAGE_TOO_BIG", + "WS_1010_MANDATORY_EXT", + "WS_1011_INTERNAL_ERROR", + "WS_1012_SERVICE_RESTART", + "WS_1013_TRY_AGAIN_LATER", + "WS_1014_BAD_GATEWAY", + "WS_1015_TLS_HANDSHAKE", +) + +HTTP_100_CONTINUE = 100 +HTTP_101_SWITCHING_PROTOCOLS = 101 +HTTP_102_PROCESSING = 102 +HTTP_103_EARLY_HINTS = 103 +HTTP_200_OK = 200 +HTTP_201_CREATED = 201 +HTTP_202_ACCEPTED = 202 +HTTP_203_NON_AUTHORITATIVE_INFORMATION = 203 +HTTP_204_NO_CONTENT = 204 +HTTP_205_RESET_CONTENT = 205 +HTTP_206_PARTIAL_CONTENT = 206 +HTTP_207_MULTI_STATUS = 207 +HTTP_208_ALREADY_REPORTED = 208 +HTTP_226_IM_USED = 226 +HTTP_300_MULTIPLE_CHOICES = 300 +HTTP_301_MOVED_PERMANENTLY = 301 +HTTP_302_FOUND = 302 +HTTP_303_SEE_OTHER = 303 +HTTP_304_NOT_MODIFIED = 304 +HTTP_305_USE_PROXY = 305 +HTTP_306_RESERVED = 306 +HTTP_307_TEMPORARY_REDIRECT = 307 +HTTP_308_PERMANENT_REDIRECT = 308 +HTTP_400_BAD_REQUEST = 400 +HTTP_401_UNAUTHORIZED = 401 +HTTP_402_PAYMENT_REQUIRED = 402 +HTTP_403_FORBIDDEN = 403 +HTTP_404_NOT_FOUND = 404 +HTTP_405_METHOD_NOT_ALLOWED = 405 +HTTP_406_NOT_ACCEPTABLE = 406 +HTTP_407_PROXY_AUTHENTICATION_REQUIRED = 407 +HTTP_408_REQUEST_TIMEOUT = 408 +HTTP_409_CONFLICT = 409 +HTTP_410_GONE = 410 +HTTP_411_LENGTH_REQUIRED = 411 +HTTP_412_PRECONDITION_FAILED = 412 +HTTP_413_REQUEST_ENTITY_TOO_LARGE = 413 +HTTP_414_REQUEST_URI_TOO_LONG = 414 +HTTP_415_UNSUPPORTED_MEDIA_TYPE = 415 +HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE = 416 +HTTP_417_EXPECTATION_FAILED = 417 +HTTP_418_IM_A_TEAPOT = 418 +HTTP_421_MISDIRECTED_REQUEST = 421 +HTTP_422_UNPROCESSABLE_ENTITY = 422 +HTTP_423_LOCKED = 423 +HTTP_424_FAILED_DEPENDENCY = 424 +HTTP_425_TOO_EARLY = 425 +HTTP_426_UPGRADE_REQUIRED = 426 +HTTP_428_PRECONDITION_REQUIRED = 428 +HTTP_429_TOO_MANY_REQUESTS = 429 +HTTP_431_REQUEST_HEADER_FIELDS_TOO_LARGE = 431 +HTTP_451_UNAVAILABLE_FOR_LEGAL_REASONS = 451 +HTTP_500_INTERNAL_SERVER_ERROR = 500 +HTTP_501_NOT_IMPLEMENTED = 501 +HTTP_502_BAD_GATEWAY = 502 +HTTP_503_SERVICE_UNAVAILABLE = 503 +HTTP_504_GATEWAY_TIMEOUT = 504 +HTTP_505_HTTP_VERSION_NOT_SUPPORTED = 505 +HTTP_506_VARIANT_ALSO_NEGOTIATES = 506 +HTTP_507_INSUFFICIENT_STORAGE = 507 +HTTP_508_LOOP_DETECTED = 508 +HTTP_510_NOT_EXTENDED = 510 +HTTP_511_NETWORK_AUTHENTICATION_REQUIRED = 511 + + +""" +WebSocket codes +https://www.iana.org/assignments/websocket/websocket.xml#close-code-number 
+https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent +""" +WS_1000_NORMAL_CLOSURE = 1000 +WS_1001_GOING_AWAY = 1001 +WS_1002_PROTOCOL_ERROR = 1002 +WS_1003_UNSUPPORTED_DATA = 1003 +WS_1005_NO_STATUS_RCVD = 1005 +WS_1006_ABNORMAL_CLOSURE = 1006 +WS_1007_INVALID_FRAME_PAYLOAD_DATA = 1007 +WS_1008_POLICY_VIOLATION = 1008 +WS_1009_MESSAGE_TOO_BIG = 1009 +WS_1010_MANDATORY_EXT = 1010 +WS_1011_INTERNAL_ERROR = 1011 +WS_1012_SERVICE_RESTART = 1012 +WS_1013_TRY_AGAIN_LATER = 1013 +WS_1014_BAD_GATEWAY = 1014 +WS_1015_TLS_HANDSHAKE = 1015 + + +__deprecated__ = {"WS_1004_NO_STATUS_RCVD": 1004, "WS_1005_ABNORMAL_CLOSURE": 1005} + + +def __getattr__(name: str) -> int: + deprecation_changes = { + "WS_1004_NO_STATUS_RCVD": "WS_1005_NO_STATUS_RCVD", + "WS_1005_ABNORMAL_CLOSURE": "WS_1006_ABNORMAL_CLOSURE", + } + deprecated = __deprecated__.get(name) + if deprecated: + stacklevel = 3 if sys.version_info >= (3, 7) else 4 + warnings.warn( + f"'{name}' is deprecated. Use '{deprecation_changes[name]}' instead.", + category=DeprecationWarning, + stacklevel=stacklevel, + ) + return deprecated + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") + + +def __dir__() -> List[str]: + return sorted(list(__all__) + list(__deprecated__.keys())) # pragma: no cover + + +pep562(__name__) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/templating.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/templating.py new file mode 100644 index 00000000..99035837 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/templating.py @@ -0,0 +1,105 @@ +import typing +from os import PathLike + +from starlette.background import BackgroundTask +from starlette.responses import Response +from starlette.types import Receive, Scope, Send + +try: + import jinja2 + + # @contextfunction was renamed to @pass_context in Jinja 3.0, and was removed in 3.1 + # hence we try to get pass_context (most installs will be >=3.1) + # and fall back to contextfunction, + # adding a type ignore for mypy to let us access an attribute that may not exist + if hasattr(jinja2, "pass_context"): + pass_context = jinja2.pass_context + else: # pragma: nocover + pass_context = jinja2.contextfunction # type: ignore[attr-defined] +except ImportError: # pragma: nocover + jinja2 = None # type: ignore + + +class _TemplateResponse(Response): + media_type = "text/html" + + def __init__( + self, + template: typing.Any, + context: dict, + status_code: int = 200, + headers: typing.Optional[typing.Mapping[str, str]] = None, + media_type: typing.Optional[str] = None, + background: typing.Optional[BackgroundTask] = None, + ): + self.template = template + self.context = context + content = template.render(context) + super().__init__(content, status_code, headers, media_type, background) + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + request = self.context.get("request", {}) + extensions = request.get("extensions", {}) + if "http.response.template" in extensions: + await send( + { + "type": "http.response.template", + "template": self.template, + "context": self.context, + } + ) + await super().__call__(scope, receive, send) + + +class Jinja2Templates: + """ + templates = Jinja2Templates("templates") + + return templates.TemplateResponse("index.html", {"request": request}) + """ + + def __init__( + self, directory: typing.Union[str, PathLike], **env_options: typing.Any + ) -> None: + assert jinja2 is not None, "jinja2 must be 
installed to use Jinja2Templates" + self.env = self._create_env(directory, **env_options) + + def _create_env( + self, directory: typing.Union[str, PathLike], **env_options: typing.Any + ) -> "jinja2.Environment": + @pass_context + def url_for(context: dict, name: str, **path_params: typing.Any) -> str: + request = context["request"] + return request.url_for(name, **path_params) + + loader = jinja2.FileSystemLoader(directory) + env_options.setdefault("loader", loader) + env_options.setdefault("autoescape", True) + + env = jinja2.Environment(**env_options) + env.globals["url_for"] = url_for + return env + + def get_template(self, name: str) -> "jinja2.Template": + return self.env.get_template(name) + + def TemplateResponse( + self, + name: str, + context: dict, + status_code: int = 200, + headers: typing.Optional[typing.Mapping[str, str]] = None, + media_type: typing.Optional[str] = None, + background: typing.Optional[BackgroundTask] = None, + ) -> _TemplateResponse: + if "request" not in context: + raise ValueError('context must include a "request" key') + template = self.get_template(name) + return _TemplateResponse( + template, + context, + status_code=status_code, + headers=headers, + media_type=media_type, + background=background, + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/testclient.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/testclient.py new file mode 100644 index 00000000..7567d18f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/testclient.py @@ -0,0 +1,584 @@ +import asyncio +import contextlib +import http +import inspect +import io +import json +import math +import queue +import sys +import types +import typing +from concurrent.futures import Future +from urllib.parse import unquote, urljoin, urlsplit + +import anyio.abc +import requests +from anyio.streams.stapled import StapledObjectStream + +from starlette.types import Message, Receive, Scope, Send +from starlette.websockets import WebSocketDisconnect + +if sys.version_info >= (3, 8): # pragma: no cover + from typing import TypedDict +else: # pragma: no cover + from typing_extensions import TypedDict + + +_PortalFactoryType = typing.Callable[ + [], typing.ContextManager[anyio.abc.BlockingPortal] +] + + +# Annotations for `Session.request()` +Cookies = typing.Union[ + typing.MutableMapping[str, str], requests.cookies.RequestsCookieJar +] +Params = typing.Union[bytes, typing.MutableMapping[str, str]] +DataType = typing.Union[bytes, typing.MutableMapping[str, str], typing.IO] +TimeOut = typing.Union[float, typing.Tuple[float, float]] +FileType = typing.MutableMapping[str, typing.IO] +AuthType = typing.Union[ + typing.Tuple[str, str], + requests.auth.AuthBase, + typing.Callable[[requests.PreparedRequest], requests.PreparedRequest], +] + + +ASGIInstance = typing.Callable[[Receive, Send], typing.Awaitable[None]] +ASGI2App = typing.Callable[[Scope], ASGIInstance] +ASGI3App = typing.Callable[[Scope, Receive, Send], typing.Awaitable[None]] + + +class _HeaderDict(requests.packages.urllib3._collections.HTTPHeaderDict): + def get_all(self, key: str, default: str) -> str: + return self.getheaders(key) + + +class _MockOriginalResponse: + """ + We have to jump through some hoops to present the response as if + it was made using urllib3. 
+ """ + + def __init__(self, headers: typing.List[typing.Tuple[bytes, bytes]]) -> None: + self.msg = _HeaderDict(headers) + self.closed = False + + def isclosed(self) -> bool: + return self.closed + + +class _Upgrade(Exception): + def __init__(self, session: "WebSocketTestSession") -> None: + self.session = session + + +def _get_reason_phrase(status_code: int) -> str: + try: + return http.HTTPStatus(status_code).phrase + except ValueError: + return "" + + +def _is_asgi3(app: typing.Union[ASGI2App, ASGI3App]) -> bool: + if inspect.isclass(app): + return hasattr(app, "__await__") + elif inspect.isfunction(app): + return asyncio.iscoroutinefunction(app) + call = getattr(app, "__call__", None) + return asyncio.iscoroutinefunction(call) + + +class _WrapASGI2: + """ + Provide an ASGI3 interface onto an ASGI2 app. + """ + + def __init__(self, app: ASGI2App) -> None: + self.app = app + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + instance = self.app(scope) + await instance(receive, send) + + +class _AsyncBackend(TypedDict): + backend: str + backend_options: typing.Dict[str, typing.Any] + + +class _ASGIAdapter(requests.adapters.HTTPAdapter): + def __init__( + self, + app: ASGI3App, + portal_factory: _PortalFactoryType, + raise_server_exceptions: bool = True, + root_path: str = "", + ) -> None: + self.app = app + self.raise_server_exceptions = raise_server_exceptions + self.root_path = root_path + self.portal_factory = portal_factory + + def send( + self, request: requests.PreparedRequest, *args: typing.Any, **kwargs: typing.Any + ) -> requests.Response: + scheme, netloc, path, query, fragment = ( + str(item) for item in urlsplit(request.url) + ) + + default_port = {"http": 80, "ws": 80, "https": 443, "wss": 443}[scheme] + + if ":" in netloc: + host, port_string = netloc.split(":", 1) + port = int(port_string) + else: + host = netloc + port = default_port + + # Include the 'host' header. + if "host" in request.headers: + headers: typing.List[typing.Tuple[bytes, bytes]] = [] + elif port == default_port: + headers = [(b"host", host.encode())] + else: + headers = [(b"host", (f"{host}:{port}").encode())] + + # Include other request headers. 
+ headers += [ + (key.lower().encode(), value.encode()) + for key, value in request.headers.items() + ] + + scope: typing.Dict[str, typing.Any] + + if scheme in {"ws", "wss"}: + subprotocol = request.headers.get("sec-websocket-protocol", None) + if subprotocol is None: + subprotocols: typing.Sequence[str] = [] + else: + subprotocols = [value.strip() for value in subprotocol.split(",")] + scope = { + "type": "websocket", + "path": unquote(path), + "raw_path": path.encode(), + "root_path": self.root_path, + "scheme": scheme, + "query_string": query.encode(), + "headers": headers, + "client": ["testclient", 50000], + "server": [host, port], + "subprotocols": subprotocols, + } + session = WebSocketTestSession(self.app, scope, self.portal_factory) + raise _Upgrade(session) + + scope = { + "type": "http", + "http_version": "1.1", + "method": request.method, + "path": unquote(path), + "raw_path": path.encode(), + "root_path": self.root_path, + "scheme": scheme, + "query_string": query.encode(), + "headers": headers, + "client": ["testclient", 50000], + "server": [host, port], + "extensions": {"http.response.template": {}}, + } + + request_complete = False + response_started = False + response_complete: anyio.Event + raw_kwargs: typing.Dict[str, typing.Any] = {"body": io.BytesIO()} + template = None + context = None + + async def receive() -> Message: + nonlocal request_complete + + if request_complete: + if not response_complete.is_set(): + await response_complete.wait() + return {"type": "http.disconnect"} + + body = request.body + if isinstance(body, str): + body_bytes: bytes = body.encode("utf-8") + elif body is None: + body_bytes = b"" + elif isinstance(body, types.GeneratorType): + try: + chunk = body.send(None) + if isinstance(chunk, str): + chunk = chunk.encode("utf-8") + return {"type": "http.request", "body": chunk, "more_body": True} + except StopIteration: + request_complete = True + return {"type": "http.request", "body": b""} + else: + body_bytes = body + + request_complete = True + return {"type": "http.request", "body": body_bytes} + + async def send(message: Message) -> None: + nonlocal raw_kwargs, response_started, template, context + + if message["type"] == "http.response.start": + assert ( + not response_started + ), 'Received multiple "http.response.start" messages.' + raw_kwargs["version"] = 11 + raw_kwargs["status"] = message["status"] + raw_kwargs["reason"] = _get_reason_phrase(message["status"]) + raw_kwargs["headers"] = [ + (key.decode(), value.decode()) + for key, value in message.get("headers", []) + ] + raw_kwargs["preload_content"] = False + raw_kwargs["original_response"] = _MockOriginalResponse( + raw_kwargs["headers"] + ) + response_started = True + elif message["type"] == "http.response.body": + assert ( + response_started + ), 'Received "http.response.body" without "http.response.start".' + assert ( + not response_complete.is_set() + ), 'Received "http.response.body" after response completed.' 
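+            # Body chunks are streamed into the BytesIO buffer below; HEAD
+            # responses deliberately keep the buffer empty.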
+ body = message.get("body", b"") + more_body = message.get("more_body", False) + if request.method != "HEAD": + raw_kwargs["body"].write(body) + if not more_body: + raw_kwargs["body"].seek(0) + response_complete.set() + elif message["type"] == "http.response.template": + template = message["template"] + context = message["context"] + + try: + with self.portal_factory() as portal: + response_complete = portal.call(anyio.Event) + portal.call(self.app, scope, receive, send) + except BaseException as exc: + if self.raise_server_exceptions: + raise exc + + if self.raise_server_exceptions: + assert response_started, "TestClient did not receive any response." + elif not response_started: + raw_kwargs = { + "version": 11, + "status": 500, + "reason": "Internal Server Error", + "headers": [], + "preload_content": False, + "original_response": _MockOriginalResponse([]), + "body": io.BytesIO(), + } + + raw = requests.packages.urllib3.HTTPResponse(**raw_kwargs) + response = self.build_response(request, raw) + if template is not None: + response.template = template + response.context = context + return response + + +class WebSocketTestSession: + def __init__( + self, + app: ASGI3App, + scope: Scope, + portal_factory: _PortalFactoryType, + ) -> None: + self.app = app + self.scope = scope + self.accepted_subprotocol = None + self.extra_headers = None + self.portal_factory = portal_factory + self._receive_queue: "queue.Queue[typing.Any]" = queue.Queue() + self._send_queue: "queue.Queue[typing.Any]" = queue.Queue() + + def __enter__(self) -> "WebSocketTestSession": + self.exit_stack = contextlib.ExitStack() + self.portal = self.exit_stack.enter_context(self.portal_factory()) + + try: + _: "Future[None]" = self.portal.start_task_soon(self._run) + self.send({"type": "websocket.connect"}) + message = self.receive() + self._raise_on_close(message) + except Exception: + self.exit_stack.close() + raise + self.accepted_subprotocol = message.get("subprotocol", None) + self.extra_headers = message.get("headers", None) + return self + + def __exit__(self, *args: typing.Any) -> None: + try: + self.close(1000) + finally: + self.exit_stack.close() + while not self._send_queue.empty(): + message = self._send_queue.get() + if isinstance(message, BaseException): + raise message + + async def _run(self) -> None: + """ + The sub-thread in which the websocket session runs. 
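+        Exceptions raised by the application are put on the send queue so the
+        test thread can re-raise them from `receive()`.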
+ """ + scope = self.scope + receive = self._asgi_receive + send = self._asgi_send + try: + await self.app(scope, receive, send) + except BaseException as exc: + self._send_queue.put(exc) + raise + + async def _asgi_receive(self) -> Message: + while self._receive_queue.empty(): + await anyio.sleep(0) + return self._receive_queue.get() + + async def _asgi_send(self, message: Message) -> None: + self._send_queue.put(message) + + def _raise_on_close(self, message: Message) -> None: + if message["type"] == "websocket.close": + raise WebSocketDisconnect( + message.get("code", 1000), message.get("reason", "") + ) + + def send(self, message: Message) -> None: + self._receive_queue.put(message) + + def send_text(self, data: str) -> None: + self.send({"type": "websocket.receive", "text": data}) + + def send_bytes(self, data: bytes) -> None: + self.send({"type": "websocket.receive", "bytes": data}) + + def send_json(self, data: typing.Any, mode: str = "text") -> None: + assert mode in ["text", "binary"] + text = json.dumps(data) + if mode == "text": + self.send({"type": "websocket.receive", "text": text}) + else: + self.send({"type": "websocket.receive", "bytes": text.encode("utf-8")}) + + def close(self, code: int = 1000) -> None: + self.send({"type": "websocket.disconnect", "code": code}) + + def receive(self) -> Message: + message = self._send_queue.get() + if isinstance(message, BaseException): + raise message + return message + + def receive_text(self) -> str: + message = self.receive() + self._raise_on_close(message) + return message["text"] + + def receive_bytes(self) -> bytes: + message = self.receive() + self._raise_on_close(message) + return message["bytes"] + + def receive_json(self, mode: str = "text") -> typing.Any: + assert mode in ["text", "binary"] + message = self.receive() + self._raise_on_close(message) + if mode == "text": + text = message["text"] + else: + text = message["bytes"].decode("utf-8") + return json.loads(text) + + +class TestClient(requests.Session): + __test__ = False # For pytest to not discover this up. 
+ task: "Future[None]" + portal: typing.Optional[anyio.abc.BlockingPortal] = None + + def __init__( + self, + app: typing.Union[ASGI2App, ASGI3App], + base_url: str = "http://testserver", + raise_server_exceptions: bool = True, + root_path: str = "", + backend: str = "asyncio", + backend_options: typing.Optional[typing.Dict[str, typing.Any]] = None, + ) -> None: + super().__init__() + self.async_backend = _AsyncBackend( + backend=backend, backend_options=backend_options or {} + ) + if _is_asgi3(app): + app = typing.cast(ASGI3App, app) + asgi_app = app + else: + app = typing.cast(ASGI2App, app) + asgi_app = _WrapASGI2(app) #  type: ignore + adapter = _ASGIAdapter( + asgi_app, + portal_factory=self._portal_factory, + raise_server_exceptions=raise_server_exceptions, + root_path=root_path, + ) + self.mount("http://", adapter) + self.mount("https://", adapter) + self.mount("ws://", adapter) + self.mount("wss://", adapter) + self.headers.update({"user-agent": "testclient"}) + self.app = asgi_app + self.base_url = base_url + + @contextlib.contextmanager + def _portal_factory( + self, + ) -> typing.Generator[anyio.abc.BlockingPortal, None, None]: + if self.portal is not None: + yield self.portal + else: + with anyio.start_blocking_portal(**self.async_backend) as portal: + yield portal + + def request( # type: ignore + self, + method: str, + url: str, + params: Params = None, + data: DataType = None, + headers: typing.MutableMapping[str, str] = None, + cookies: Cookies = None, + files: FileType = None, + auth: AuthType = None, + timeout: TimeOut = None, + allow_redirects: bool = None, + proxies: typing.MutableMapping[str, str] = None, + hooks: typing.Any = None, + stream: bool = None, + verify: typing.Union[bool, str] = None, + cert: typing.Union[str, typing.Tuple[str, str]] = None, + json: typing.Any = None, + ) -> requests.Response: + url = urljoin(self.base_url, url) + return super().request( + method, + url, + params=params, + data=data, + headers=headers, + cookies=cookies, + files=files, + auth=auth, + timeout=timeout, + allow_redirects=allow_redirects, + proxies=proxies, + hooks=hooks, + stream=stream, + verify=verify, + cert=cert, + json=json, + ) + + def websocket_connect( + self, url: str, subprotocols: typing.Sequence[str] = None, **kwargs: typing.Any + ) -> typing.Any: + url = urljoin("ws://testserver", url) + headers = kwargs.get("headers", {}) + headers.setdefault("connection", "upgrade") + headers.setdefault("sec-websocket-key", "testserver==") + headers.setdefault("sec-websocket-version", "13") + if subprotocols is not None: + headers.setdefault("sec-websocket-protocol", ", ".join(subprotocols)) + kwargs["headers"] = headers + try: + super().request("GET", url, **kwargs) + except _Upgrade as exc: + session = exc.session + else: + raise RuntimeError("Expected WebSocket upgrade") # pragma: no cover + + return session + + def __enter__(self) -> "TestClient": + with contextlib.ExitStack() as stack: + self.portal = portal = stack.enter_context( + anyio.start_blocking_portal(**self.async_backend) + ) + + @stack.callback + def reset_portal() -> None: + self.portal = None + + self.stream_send = StapledObjectStream( + *anyio.create_memory_object_stream(math.inf) + ) + self.stream_receive = StapledObjectStream( + *anyio.create_memory_object_stream(math.inf) + ) + self.task = portal.start_task_soon(self.lifespan) + portal.call(self.wait_startup) + + @stack.callback + def wait_shutdown() -> None: + portal.call(self.wait_shutdown) + + self.exit_stack = stack.pop_all() + + return self + + def 
__exit__(self, *args: typing.Any) -> None: + self.exit_stack.close() + + async def lifespan(self) -> None: + scope = {"type": "lifespan"} + try: + await self.app(scope, self.stream_receive.receive, self.stream_send.send) + finally: + await self.stream_send.send(None) + + async def wait_startup(self) -> None: + await self.stream_receive.send({"type": "lifespan.startup"}) + + async def receive() -> typing.Any: + message = await self.stream_send.receive() + if message is None: + self.task.result() + return message + + message = await receive() + assert message["type"] in ( + "lifespan.startup.complete", + "lifespan.startup.failed", + ) + if message["type"] == "lifespan.startup.failed": + await receive() + + async def wait_shutdown(self) -> None: + async def receive() -> typing.Any: + message = await self.stream_send.receive() + if message is None: + self.task.result() + return message + + async with self.stream_send: + await self.stream_receive.send({"type": "lifespan.shutdown"}) + message = await receive() + assert message["type"] in ( + "lifespan.shutdown.complete", + "lifespan.shutdown.failed", + ) + if message["type"] == "lifespan.shutdown.failed": + await receive() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/types.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/types.py new file mode 100644 index 00000000..888645ca --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/types.py @@ -0,0 +1,9 @@ +import typing + +Scope = typing.MutableMapping[str, typing.Any] +Message = typing.MutableMapping[str, typing.Any] + +Receive = typing.Callable[[], typing.Awaitable[Message]] +Send = typing.Callable[[Message], typing.Awaitable[None]] + +ASGIApp = typing.Callable[[Scope, Receive, Send], typing.Awaitable[None]] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/websockets.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/websockets.py new file mode 100644 index 00000000..afcbde7f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/starlette/websockets.py @@ -0,0 +1,193 @@ +import enum +import json +import typing + +from starlette.requests import HTTPConnection +from starlette.types import Message, Receive, Scope, Send + + +class WebSocketState(enum.Enum): + CONNECTING = 0 + CONNECTED = 1 + DISCONNECTED = 2 + + +class WebSocketDisconnect(Exception): + def __init__(self, code: int = 1000, reason: typing.Optional[str] = None) -> None: + self.code = code + self.reason = reason or "" + + +class WebSocket(HTTPConnection): + def __init__(self, scope: Scope, receive: Receive, send: Send) -> None: + super().__init__(scope) + assert scope["type"] == "websocket" + self._receive = receive + self._send = send + self.client_state = WebSocketState.CONNECTING + self.application_state = WebSocketState.CONNECTING + + async def receive(self) -> Message: + """ + Receive ASGI websocket messages, ensuring valid state transitions. 
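+        The client-side state machine is CONNECTING -> CONNECTED -> DISCONNECTED.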
+ """ + if self.client_state == WebSocketState.CONNECTING: + message = await self._receive() + message_type = message["type"] + if message_type != "websocket.connect": + raise RuntimeError( + 'Expected ASGI message "websocket.connect", ' + f"but got {message_type!r}" + ) + self.client_state = WebSocketState.CONNECTED + return message + elif self.client_state == WebSocketState.CONNECTED: + message = await self._receive() + message_type = message["type"] + if message_type not in {"websocket.receive", "websocket.disconnect"}: + raise RuntimeError( + 'Expected ASGI message "websocket.receive" or ' + f'"websocket.disconnect", but got {message_type!r}' + ) + if message_type == "websocket.disconnect": + self.client_state = WebSocketState.DISCONNECTED + return message + else: + raise RuntimeError( + 'Cannot call "receive" once a disconnect message has been received.' + ) + + async def send(self, message: Message) -> None: + """ + Send ASGI websocket messages, ensuring valid state transitions. + """ + if self.application_state == WebSocketState.CONNECTING: + message_type = message["type"] + if message_type not in {"websocket.accept", "websocket.close"}: + raise RuntimeError( + 'Expected ASGI message "websocket.connect", ' + f"but got {message_type!r}" + ) + if message_type == "websocket.close": + self.application_state = WebSocketState.DISCONNECTED + else: + self.application_state = WebSocketState.CONNECTED + await self._send(message) + elif self.application_state == WebSocketState.CONNECTED: + message_type = message["type"] + if message_type not in {"websocket.send", "websocket.close"}: + raise RuntimeError( + 'Expected ASGI message "websocket.send" or "websocket.close", ' + f"but got {message_type!r}" + ) + if message_type == "websocket.close": + self.application_state = WebSocketState.DISCONNECTED + await self._send(message) + else: + raise RuntimeError('Cannot call "send" once a close message has been sent.') + + async def accept( + self, + subprotocol: typing.Optional[str] = None, + headers: typing.Optional[typing.Iterable[typing.Tuple[bytes, bytes]]] = None, + ) -> None: + headers = headers or [] + + if self.client_state == WebSocketState.CONNECTING: + # If we haven't yet seen the 'connect' message, then wait for it first. + await self.receive() + await self.send( + {"type": "websocket.accept", "subprotocol": subprotocol, "headers": headers} + ) + + def _raise_on_disconnect(self, message: Message) -> None: + if message["type"] == "websocket.disconnect": + raise WebSocketDisconnect(message["code"]) + + async def receive_text(self) -> str: + if self.application_state != WebSocketState.CONNECTED: + raise RuntimeError( + 'WebSocket is not connected. Need to call "accept" first.' + ) + message = await self.receive() + self._raise_on_disconnect(message) + return message["text"] + + async def receive_bytes(self) -> bytes: + if self.application_state != WebSocketState.CONNECTED: + raise RuntimeError( + 'WebSocket is not connected. Need to call "accept" first.' + ) + message = await self.receive() + self._raise_on_disconnect(message) + return message["bytes"] + + async def receive_json(self, mode: str = "text") -> typing.Any: + if mode not in {"text", "binary"}: + raise RuntimeError('The "mode" argument should be "text" or "binary".') + if self.application_state != WebSocketState.CONNECTED: + raise RuntimeError( + 'WebSocket is not connected. Need to call "accept" first.' 
+ ) + message = await self.receive() + self._raise_on_disconnect(message) + + if mode == "text": + text = message["text"] + else: + text = message["bytes"].decode("utf-8") + return json.loads(text) + + async def iter_text(self) -> typing.AsyncIterator[str]: + try: + while True: + yield await self.receive_text() + except WebSocketDisconnect: + pass + + async def iter_bytes(self) -> typing.AsyncIterator[bytes]: + try: + while True: + yield await self.receive_bytes() + except WebSocketDisconnect: + pass + + async def iter_json(self) -> typing.AsyncIterator[typing.Any]: + try: + while True: + yield await self.receive_json() + except WebSocketDisconnect: + pass + + async def send_text(self, data: str) -> None: + await self.send({"type": "websocket.send", "text": data}) + + async def send_bytes(self, data: bytes) -> None: + await self.send({"type": "websocket.send", "bytes": data}) + + async def send_json(self, data: typing.Any, mode: str = "text") -> None: + if mode not in {"text", "binary"}: + raise RuntimeError('The "mode" argument should be "text" or "binary".') + text = json.dumps(data) + if mode == "text": + await self.send({"type": "websocket.send", "text": text}) + else: + await self.send({"type": "websocket.send", "bytes": text.encode("utf-8")}) + + async def close( + self, code: int = 1000, reason: typing.Optional[str] = None + ) -> None: + await self.send( + {"type": "websocket.close", "code": code, "reason": reason or ""} + ) + + +class WebSocketClose: + def __init__(self, code: int = 1000, reason: typing.Optional[str] = None) -> None: + self.code = code + self.reason = reason or "" + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + await send( + {"type": "websocket.close", "code": self.code, "reason": self.reason} + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/LICENSE b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/LICENSE new file mode 100644 index 00000000..583f9f6e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/LICENSE @@ -0,0 +1,254 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations (now Zope +Corporation, see http://www.zope.com). 
In 2001, the Python Software +Foundation (PSF, see http://www.python.org/psf/) was formed, a +non-profit organization created specifically to own Python-related +Intellectual Property. Zope Corporation is a sponsoring member of +the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are +retained in Python alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. 
PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. 
This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. 
By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/METADATA new file mode 100644 index 00000000..30236884 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/METADATA @@ -0,0 +1,176 @@ +Metadata-Version: 2.1 +Name: typing_extensions +Version: 4.3.0 +Summary: Backported and Experimental Type Hints for Python 3.7+ +Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing +Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Topic :: Software Development +Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues +Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md +Project-URL: Documentation, https://typing.readthedocs.io/ +Project-URL: Home, https://github.com/python/typing_extensions +Project-URL: Q & A, https://github.com/python/typing/discussions +Project-URL: Repository, https://github.com/python/typing_extensions + +# Typing Extensions + +[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing) + +## Overview + +The `typing_extensions` module serves two related purposes: + +- Enable use of new type system features on older Python versions. 
For example, + `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows + users on Python 3.6 through 3.9 to use it too. +- Enable experimentation with new type system PEPs before they are accepted and + added to the `typing` module. + +New features may be added to `typing_extensions` as soon as they are specified +in a PEP that has been added to the [python/peps](https://github.com/python/peps) +repository. If the PEP is accepted, the feature will then be added to `typing` +for the next CPython release. No typing PEP has been rejected so far, so we +haven't yet figured out how to deal with that possibility. + +Starting with version 4.0.0, `typing_extensions` uses +[Semantic Versioning](https://semver.org/). The +major version is incremented for all backwards-incompatible changes. +Therefore, it's safe to depend +on `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`, +where `x.y` is the first version that includes all features you need. + +`typing_extensions` supports Python versions 3.7 and higher. In the future, +support for older Python versions will be dropped some time after that version +reaches end of life. + +## Included items + +This module currently contains the following: + +- Experimental features + + - (Currently none) + +- In `typing` since Python 3.11 + + - `assert_never` + - `assert_type` + - `clear_overloads` + - `@dataclass_transform()` (see PEP 681) + - `get_overloads` + - `LiteralString` (see PEP 675) + - `Never` + - `NotRequired` (see PEP 655) + - `reveal_type` + - `Required` (see PEP 655) + - `Self` (see PEP 673) + - `TypeVarTuple` (see PEP 646) + - `Unpack` (see PEP 646) + +- In `typing` since Python 3.10 + + - `Concatenate` (see PEP 612) + - `ParamSpec` (see PEP 612) + - `ParamSpecArgs` (see PEP 612) + - `ParamSpecKwargs` (see PEP 612) + - `TypeAlias` (see PEP 613) + - `TypeGuard` (see PEP 647) + - `is_typeddict` + +- In `typing` since Python 3.9 + + - `Annotated` (see PEP 593) + +- In `typing` since Python 3.8 + + - `final` (see PEP 591) + - `Final` (see PEP 591) + - `Literal` (see PEP 586) + - `Protocol` (see PEP 544) + - `runtime_checkable` (see PEP 544) + - `TypedDict` (see PEP 589) + - `get_origin` (`typing_extensions` provides this function only in Python 3.7+) + - `get_args` (`typing_extensions` provides this function only in Python 3.7+) + +- In `typing` since Python 3.7 + + - `OrderedDict` + +- In `typing` since Python 3.5 or 3.6 (see [the typing documentation](https://docs.python.org/3.10/library/typing.html) for details) + + - `AsyncContextManager` + - `AsyncGenerator` + - `AsyncIterable` + - `AsyncIterator` + - `Awaitable` + - `ChainMap` + - `ClassVar` (see PEP 526) + - `ContextManager` + - `Coroutine` + - `Counter` + - `DefaultDict` + - `Deque` + - `NamedTuple` + - `NewType` + - `NoReturn` + - `overload` + - `Text` + - `Type` + - `TYPE_CHECKING` + - `get_type_hints` + +# Other Notes and Limitations + +Certain objects were changed after they were added to `typing`, and +`typing_extensions` provides a backport even on newer Python versions: + +- `TypedDict` does not store runtime information + about which (if any) keys are non-required in Python 3.8, and does not + honor the `total` keyword with old-style `TypedDict()` in Python + 3.9.0 and 3.9.1. `TypedDict` also does not support multiple inheritance + with `typing.Generic` on Python <3.11. +- `get_origin` and `get_args` lack support for `Annotated` in + Python 3.8 and lack support for `ParamSpecArgs` and `ParamSpecKwargs` + in 3.9. 
+- `@final` was changed in Python 3.11 to set the `.__final__` attribute. +- `@overload` was changed in Python 3.11 to make function overloads + introspectable at runtime. In order to access overloads with + `typing_extensions.get_overloads()`, you must use + `@typing_extensions.overload`. +- `NamedTuple` was changed in Python 3.11 to allow for multiple inheritance + with `typing.Generic`. + +There are a few types whose interface was modified between different +versions of typing. For example, `typing.Sequence` was modified to +subclass `typing.Reversible` as of Python 3.5.3. + +These changes are _not_ backported to prevent subtle compatibility +issues when mixing the differing implementations of modified classes. + +Certain types have incorrect runtime behavior due to limitations of older +versions of the typing module: + +- `ParamSpec` and `Concatenate` will not work with `get_args` and + `get_origin`. Certain PEP 612 special cases in user-defined + `Generic`s are also not available. + +These types are only guaranteed to work for static type checking. + +## Running tests + +To run tests, navigate into the appropriate source directory and run +`test_typing_extensions.py`. + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/RECORD new file mode 100644 index 00000000..6e1b4d0c --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/RECORD @@ -0,0 +1,7 @@ +__pycache__/typing_extensions.cpython-37.pyc,, +typing_extensions-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +typing_extensions-4.3.0.dist-info/LICENSE,sha256=_xfOlOECAk3raHc-scx0ynbaTmWPNzUx8Kwi1oprsa0,12755 +typing_extensions-4.3.0.dist-info/METADATA,sha256=Gc182SCaY3PXonSf1hdCKS9NGE0QLoBnxSS9V9VCOIk,6350 +typing_extensions-4.3.0.dist-info/RECORD,, +typing_extensions-4.3.0.dist-info/WHEEL,sha256=4TfKIB_xu-04bc2iKz6_zFt-gEFEEDU_31HGhqzOCE8,81 +typing_extensions.py,sha256=uZ95U2XP0uVC6SD6OhMsspPxUi7HaLXBIulP58ImWDA,75384 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/WHEEL b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/WHEEL new file mode 100644 index 00000000..668ba4d0 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions-4.3.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.7.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions.py new file mode 100644 index 00000000..31d3564e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/typing_extensions.py @@ -0,0 +1,2069 @@ +import abc +import collections +import collections.abc +import functools +import operator +import sys +import types as _types +import typing + + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'ClassVar', + 'Concatenate', + 'Final', + 'LiteralString', + 'ParamSpec', + 'ParamSpecArgs', + 'ParamSpecKwargs', + 'Self', + 'Type', + 'TypeVarTuple', + 'Unpack', + + # ABCs (from collections.abc). 
+ 'Awaitable', + 'AsyncIterator', + 'AsyncIterable', + 'Coroutine', + 'AsyncGenerator', + 'AsyncContextManager', + 'ChainMap', + + # Concrete collection types. + 'ContextManager', + 'Counter', + 'Deque', + 'DefaultDict', + 'NamedTuple', + 'OrderedDict', + 'TypedDict', + + # Structural checks, a.k.a. protocols. + 'SupportsIndex', + + # One-off things. + 'Annotated', + 'assert_never', + 'assert_type', + 'clear_overloads', + 'dataclass_transform', + 'get_overloads', + 'final', + 'get_args', + 'get_origin', + 'get_type_hints', + 'IntVar', + 'is_typeddict', + 'Literal', + 'NewType', + 'overload', + 'Protocol', + 'reveal_type', + 'runtime', + 'runtime_checkable', + 'Text', + 'TypeAlias', + 'TypeGuard', + 'TYPE_CHECKING', + 'Never', + 'NoReturn', + 'Required', + 'NotRequired', +] + +# for backward compatibility +PEP_560 = True +GenericMeta = type + +# The functions below are modified copies of typing internal helpers. +# They are needed by _ProtocolMeta and they provide support for PEP 646. + +_marker = object() + + +def _check_generic(cls, parameters, elen=_marker): + """Check correct count for parameters of a generic cls (internal helper). + This gives a nice error message in case of count mismatch. + """ + if not elen: + raise TypeError(f"{cls} is not a generic class") + if elen is _marker: + if not hasattr(cls, "__parameters__") or not cls.__parameters__: + raise TypeError(f"{cls} is not a generic class") + elen = len(cls.__parameters__) + alen = len(parameters) + if alen != elen: + if hasattr(cls, "__parameters__"): + parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] + num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) + if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): + return + raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};" + f" actual {alen}, expected {elen}") + + +if sys.version_info >= (3, 10): + def _should_collect_from_parameters(t): + return isinstance( + t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType) + ) +elif sys.version_info >= (3, 9): + def _should_collect_from_parameters(t): + return isinstance(t, (typing._GenericAlias, _types.GenericAlias)) +else: + def _should_collect_from_parameters(t): + return isinstance(t, typing._GenericAlias) and not t._special + + +def _collect_type_vars(types, typevar_types=None): + """Collect all type variable contained in types in order of + first appearance (lexicographic order). For example:: + + _collect_type_vars((T, List[S, T])) == (T, S) + """ + if typevar_types is None: + typevar_types = typing.TypeVar + tvars = [] + for t in types: + if ( + isinstance(t, typevar_types) and + t not in tvars and + not _is_unpack(t) + ): + tvars.append(t) + if _should_collect_from_parameters(t): + tvars.extend([t for t in t.__parameters__ if t not in tvars]) + return tuple(tvars) + + +NoReturn = typing.NoReturn + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = typing.TypeVar('T') # Any type. +KT = typing.TypeVar('KT') # Key type. +VT = typing.TypeVar('VT') # Value type. +T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. +T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. + +ClassVar = typing.ClassVar + +# On older versions of typing there is an internal class named "Final". 
+# 3.8+ +if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): + Final = typing.Final +# 3.7 +else: + class _FinalForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + Final = _FinalForm('Final', + doc="""A special typing construct to indicate that a name + cannot be re-assigned or overridden in a subclass. + For example: + + MAX_SIZE: Final = 9000 + MAX_SIZE += 1 # Error reported by type checker + + class Connection: + TIMEOUT: Final[int] = 10 + class FastConnector(Connection): + TIMEOUT = 1 # Error reported by type checker + + There is no runtime checking of these properties.""") + +if sys.version_info >= (3, 11): + final = typing.final +else: + # @final exists in 3.8+, but we backport it for all versions + # before 3.11 to keep support for the __final__ attribute. + # See https://bugs.python.org/issue46342 + def final(f): + """This decorator can be used to indicate to type checkers that + the decorated method cannot be overridden, and decorated class + cannot be subclassed. For example: + + class Base: + @final + def done(self) -> None: + ... + class Sub(Base): + def done(self) -> None: # Error reported by type checker + ... + @final + class Leaf: + ... + class Other(Leaf): # Error reported by type checker + ... + + There is no runtime checking of these properties. The decorator + sets the ``__final__`` attribute to ``True`` on the decorated object + to allow runtime introspection. + """ + try: + f.__final__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. + pass + return f + + +def IntVar(name): + return typing.TypeVar(name) + + +# 3.8+: +if hasattr(typing, 'Literal'): + Literal = typing.Literal +# 3.7: +else: + class _LiteralForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + return typing._GenericAlias(self, parameters) + + Literal = _LiteralForm('Literal', + doc="""A type that can be used to indicate to type checkers + that the corresponding value has a value literally equivalent + to the provided parameter. For example: + + var: Literal[4] = 4 + + The type checker understands that 'var' is literally equal to + the value 4 and no other value. + + Literal[...] cannot be subclassed. There is no runtime + checking verifying that the parameter is actually a value + instead of a type.""") + + +_overload_dummy = typing._overload_dummy # noqa + + +if hasattr(typing, "get_overloads"): # 3.11+ + overload = typing.overload + get_overloads = typing.get_overloads + clear_overloads = typing.clear_overloads +else: + # {module: {qualname: {firstlineno: func}}} + _overload_registry = collections.defaultdict( + functools.partial(collections.defaultdict, dict) + ) + + def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. 
a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + + The overloads for a function can be retrieved at runtime using the + get_overloads() function. + """ + # classmethod and staticmethod + f = getattr(func, "__func__", func) + try: + _overload_registry[f.__module__][f.__qualname__][ + f.__code__.co_firstlineno + ] = func + except AttributeError: + # Not a normal function; ignore. + pass + return _overload_dummy + + def get_overloads(func): + """Return all defined overloads for *func* as a sequence.""" + # classmethod and staticmethod + f = getattr(func, "__func__", func) + if f.__module__ not in _overload_registry: + return [] + mod_dict = _overload_registry[f.__module__] + if f.__qualname__ not in mod_dict: + return [] + return list(mod_dict[f.__qualname__].values()) + + def clear_overloads(): + """Clear all overloads in the registry.""" + _overload_registry.clear() + + +# This is not a real generic class. Don't use outside annotations. +Type = typing.Type + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + + +Awaitable = typing.Awaitable +Coroutine = typing.Coroutine +AsyncIterable = typing.AsyncIterable +AsyncIterator = typing.AsyncIterator +Deque = typing.Deque +ContextManager = typing.ContextManager +AsyncContextManager = typing.AsyncContextManager +DefaultDict = typing.DefaultDict + +# 3.7.2+ +if hasattr(typing, 'OrderedDict'): + OrderedDict = typing.OrderedDict +# 3.7.0-3.7.2 +else: + OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) + +Counter = typing.Counter +ChainMap = typing.ChainMap +AsyncGenerator = typing.AsyncGenerator +NewType = typing.NewType +Text = typing.Text +TYPE_CHECKING = typing.TYPE_CHECKING + + +_PROTO_WHITELIST = ['Callable', 'Awaitable', + 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator', + 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', + 'ContextManager', 'AsyncContextManager'] + + +def _get_protocol_attrs(cls): + attrs = set() + for base in cls.__mro__[:-1]: # without object + if base.__name__ in ('Protocol', 'Generic'): + continue + annotations = getattr(base, '__annotations__', {}) + for attr in list(base.__dict__.keys()) + list(annotations.keys()): + if (not attr.startswith('_abc_') and attr not in ( + '__abstractmethods__', '__annotations__', '__weakref__', + '_is_protocol', '_is_runtime_protocol', '__dict__', + '__args__', '__slots__', + '__next_in_mro__', '__parameters__', '__origin__', + '__orig_bases__', '__extra__', '__tree_hash__', + '__doc__', '__subclasshook__', '__init__', '__new__', + '__module__', '_MutableMapping__marker', '_gorg')): + attrs.add(attr) + return attrs + + +def _is_callable_members_only(cls): + return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls)) + + +def _maybe_adjust_parameters(cls): + """Helper function used in Protocol.__init_subclass__ and _TypedDictMeta.__new__. + + The contents of this function are very similar + to logic found in typing.Generic.__init_subclass__ + on the CPython main branch. + """ + tvars = [] + if '__orig_bases__' in cls.__dict__: + tvars = typing._collect_type_vars(cls.__orig_bases__) + # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. + # If found, tvars must be a subset of it. 
+ # If not found, tvars is it. + # Also check for and reject plain Generic, + # and reject multiple Generic[...] and/or Protocol[...]. + gvars = None + for base in cls.__orig_bases__: + if (isinstance(base, typing._GenericAlias) and + base.__origin__ in (typing.Generic, Protocol)): + # for error messages + the_base = base.__origin__.__name__ + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...]" + " and/or Protocol[...] multiple types.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + s_vars = ', '.join(str(t) for t in tvars if t not in gvarset) + s_args = ', '.join(str(g) for g in gvars) + raise TypeError(f"Some type variables ({s_vars}) are" + f" not listed in {the_base}[{s_args}]") + tvars = gvars + cls.__parameters__ = tuple(tvars) + + +# 3.8+ +if hasattr(typing, 'Protocol'): + Protocol = typing.Protocol +# 3.7 +else: + + def _no_init(self, *args, **kwargs): + if type(self)._is_protocol: + raise TypeError('Protocols cannot be instantiated') + + class _ProtocolMeta(abc.ABCMeta): + # This metaclass is a bit unfortunate and exists only because of the lack + # of __instancehook__. + def __instancecheck__(cls, instance): + # We need this method for situations where attributes are + # assigned in __init__. + if ((not getattr(cls, '_is_protocol', False) or + _is_callable_members_only(cls)) and + issubclass(instance.__class__, cls)): + return True + if cls._is_protocol: + if all(hasattr(instance, attr) and + (not callable(getattr(cls, attr, None)) or + getattr(instance, attr) is not None) + for attr in _get_protocol_attrs(cls)): + return True + return super().__instancecheck__(instance) + + class Protocol(metaclass=_ProtocolMeta): + # There is quite a lot of overlapping code with typing.Generic. + # Unfortunately it is hard to avoid this while these live in two different + # modules. The duplicated code will be removed when Protocol is moved to typing. + """Base class for protocol classes. Protocol classes are defined as:: + + class Proto(Protocol): + def meth(self) -> int: + ... + + Such classes are primarily used with static type checkers that recognize + structural subtyping (static duck-typing), for example:: + + class C: + def meth(self) -> int: + return 0 + + def func(x: Proto) -> int: + return x.meth() + + func(C()) # Passes static type check + + See PEP 544 for details. Protocol classes decorated with + @typing_extensions.runtime act as simple-minded runtime protocol that checks + only the presence of given attributes, ignoring their type signatures. + + Protocol classes can be generic, they are defined as:: + + class GenProto(Protocol[T]): + def meth(self) -> T: + ... + """ + __slots__ = () + _is_protocol = True + + def __new__(cls, *args, **kwds): + if cls is Protocol: + raise TypeError("Type Protocol cannot be instantiated; " + "it can only be used as a base class") + return super().__new__(cls) + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple): + params = (params,) + if not params and cls is not typing.Tuple: + raise TypeError( + f"Parameter list to {cls.__qualname__}[...] cannot be empty") + msg = "Parameters to generic types must be types." + params = tuple(typing._type_check(p, msg) for p in params) # noqa + if cls is Protocol: + # Generic can only be subscripted with unique type variables. 
+ if not all(isinstance(p, typing.TypeVar) for p in params): + i = 0 + while isinstance(params[i], typing.TypeVar): + i += 1 + raise TypeError( + "Parameters to Protocol[...] must all be type variables." + f" Parameter {i + 1} is {params[i]}") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Protocol[...] must all be unique") + else: + # Subscripting a regular Generic subclass. + _check_generic(cls, params, len(cls.__parameters__)) + return typing._GenericAlias(cls, params) + + def __init_subclass__(cls, *args, **kwargs): + if '__orig_bases__' in cls.__dict__: + error = typing.Generic in cls.__orig_bases__ + else: + error = typing.Generic in cls.__bases__ + if error: + raise TypeError("Cannot inherit from plain Generic") + _maybe_adjust_parameters(cls) + + # Determine if this is a protocol or a concrete subclass. + if not cls.__dict__.get('_is_protocol', None): + cls._is_protocol = any(b is Protocol for b in cls.__bases__) + + # Set (or override) the protocol subclass hook. + def _proto_hook(other): + if not cls.__dict__.get('_is_protocol', None): + return NotImplemented + if not getattr(cls, '_is_runtime_protocol', False): + if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + return NotImplemented + raise TypeError("Instance and class checks can only be used with" + " @runtime protocols") + if not _is_callable_members_only(cls): + if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + return NotImplemented + raise TypeError("Protocols with non-method members" + " don't support issubclass()") + if not isinstance(other, type): + # Same error as for issubclass(1, int) + raise TypeError('issubclass() arg 1 must be a class') + for attr in _get_protocol_attrs(cls): + for base in other.__mro__: + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + annotations = getattr(base, '__annotations__', {}) + if (isinstance(annotations, typing.Mapping) and + attr in annotations and + isinstance(other, _ProtocolMeta) and + other._is_protocol): + break + else: + return NotImplemented + return True + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + + # We have nothing more to do for non-protocols. + if not cls._is_protocol: + return + + # Check consistency of bases. + for base in cls.__bases__: + if not (base in (object, typing.Generic) or + base.__module__ == 'collections.abc' and + base.__name__ in _PROTO_WHITELIST or + isinstance(base, _ProtocolMeta) and base._is_protocol): + raise TypeError('Protocols can only inherit from other' + f' protocols, got {repr(base)}') + cls.__init__ = _no_init + + +# 3.8+ +if hasattr(typing, 'runtime_checkable'): + runtime_checkable = typing.runtime_checkable +# 3.7 +else: + def runtime_checkable(cls): + """Mark a protocol class as a runtime protocol, so that it + can be used with isinstance() and issubclass(). Raise TypeError + if applied to a non-protocol class. + + This allows a simple-minded structural check very similar to the + one-offs in collections.abc such as Hashable. + """ + if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: + raise TypeError('@runtime_checkable can be only applied to protocol classes,' + f' got {cls!r}') + cls._is_runtime_protocol = True + return cls + + +# Exists for backwards compatibility. 
+runtime = runtime_checkable + + +# 3.8+ +if hasattr(typing, 'SupportsIndex'): + SupportsIndex = typing.SupportsIndex +# 3.7 +else: + @runtime_checkable + class SupportsIndex(Protocol): + __slots__ = () + + @abc.abstractmethod + def __index__(self) -> int: + pass + + +if hasattr(typing, "Required"): + # The standard library TypedDict in Python 3.8 does not store runtime information + # about which (if any) keys are optional. See https://bugs.python.org/issue38834 + # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" + # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 + # The standard library TypedDict below Python 3.11 does not store runtime + # information about optional and required keys when using Required or NotRequired. + # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. + TypedDict = typing.TypedDict + _TypedDictMeta = typing._TypedDictMeta + is_typeddict = typing.is_typeddict +else: + def _check_fails(cls, other): + try: + if sys._getframe(1).f_globals['__name__'] not in ['abc', + 'functools', + 'typing']: + # Typed dicts are only for static structural subtyping. + raise TypeError('TypedDict does not support instance and class checks') + except (AttributeError, ValueError): + pass + return False + + def _dict_new(*args, **kwargs): + if not args: + raise TypeError('TypedDict.__new__(): not enough arguments') + _, args = args[0], args[1:] # allow the "cls" keyword be passed + return dict(*args, **kwargs) + + _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)' + + def _typeddict_new(*args, total=True, **kwargs): + if not args: + raise TypeError('TypedDict.__new__(): not enough arguments') + _, args = args[0], args[1:] # allow the "cls" keyword be passed + if args: + typename, args = args[0], args[1:] # allow the "_typename" keyword be passed + elif '_typename' in kwargs: + typename = kwargs.pop('_typename') + import warnings + warnings.warn("Passing '_typename' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError("TypedDict.__new__() missing 1 required positional " + "argument: '_typename'") + if args: + try: + fields, = args # allow the "_fields" keyword be passed + except ValueError: + raise TypeError('TypedDict.__new__() takes from 2 to 3 ' + f'positional arguments but {len(args) + 2} ' + 'were given') + elif '_fields' in kwargs and len(kwargs) == 1: + fields = kwargs.pop('_fields') + import warnings + warnings.warn("Passing '_fields' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + fields = None + + if fields is None: + fields = kwargs + elif kwargs: + raise TypeError("TypedDict takes either a dict or keyword arguments," + " but not both") + + ns = {'__annotations__': dict(fields)} + try: + # Setting correct module is necessary to make typed dict classes pickleable. + ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + + return _TypedDictMeta(typename, (), ns, total=total) + + _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,' + ' /, *, total=True, **kwargs)') + + class _TypedDictMeta(type): + def __init__(cls, name, bases, ns, total=True): + super().__init__(name, bases, ns) + + def __new__(cls, name, bases, ns, total=True): + # Create new typed dict class object. + # This method is called directly when TypedDict is subclassed, + # or via _typeddict_new when TypedDict is instantiated. 
This way + # TypedDict supports all three syntaxes described in its docstring. + # Subclasses and instances of TypedDict return actual dictionaries + # via _dict_new. + ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new + # Don't insert typing.Generic into __bases__ here, + # or Generic.__init_subclass__ will raise TypeError + # in the super().__new__() call. + # Instead, monkey-patch __bases__ onto the class after it's been created. + tp_dict = super().__new__(cls, name, (dict,), ns) + + if any(issubclass(base, typing.Generic) for base in bases): + tp_dict.__bases__ = (typing.Generic, dict) + _maybe_adjust_parameters(tp_dict) + + annotations = {} + own_annotations = ns.get('__annotations__', {}) + msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" + own_annotations = { + n: typing._type_check(tp, msg) for n, tp in own_annotations.items() + } + required_keys = set() + optional_keys = set() + + for base in bases: + annotations.update(base.__dict__.get('__annotations__', {})) + required_keys.update(base.__dict__.get('__required_keys__', ())) + optional_keys.update(base.__dict__.get('__optional_keys__', ())) + + annotations.update(own_annotations) + for annotation_key, annotation_type in own_annotations.items(): + annotation_origin = get_origin(annotation_type) + if annotation_origin is Annotated: + annotation_args = get_args(annotation_type) + if annotation_args: + annotation_type = annotation_args[0] + annotation_origin = get_origin(annotation_type) + + if annotation_origin is Required: + required_keys.add(annotation_key) + elif annotation_origin is NotRequired: + optional_keys.add(annotation_key) + elif total: + required_keys.add(annotation_key) + else: + optional_keys.add(annotation_key) + + tp_dict.__annotations__ = annotations + tp_dict.__required_keys__ = frozenset(required_keys) + tp_dict.__optional_keys__ = frozenset(optional_keys) + if not hasattr(tp_dict, '__total__'): + tp_dict.__total__ = total + return tp_dict + + __instancecheck__ = __subclasscheck__ = _check_fails + + TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) + TypedDict.__module__ = __name__ + TypedDict.__doc__ = \ + """A simple typed name space. At runtime it is equivalent to a plain dict. + + TypedDict creates a dictionary type that expects all of its + instances to have a certain set of keys, with each key + associated with a value of a consistent type. This expectation + is not checked at runtime but is only enforced by type checkers. + Usage:: + + class Point2D(TypedDict): + x: int + y: int + label: str + + a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK + b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check + + assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') + + The type info can be accessed via the Point2D.__annotations__ dict, and + the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. 
+ TypedDict supports two additional equivalent forms:: + + Point2D = TypedDict('Point2D', x=int, y=int, label=str) + Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) + + The class syntax is only supported in Python 3.6+, while two other + syntax forms work for Python 2.7 and 3.2+ + """ + + if hasattr(typing, "_TypedDictMeta"): + _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) + else: + _TYPEDDICT_TYPES = (_TypedDictMeta,) + + def is_typeddict(tp): + """Check if an annotation is a TypedDict class + + For example:: + class Film(TypedDict): + title: str + year: int + + is_typeddict(Film) # => True + is_typeddict(Union[list, str]) # => False + """ + return isinstance(tp, tuple(_TYPEDDICT_TYPES)) + + +if hasattr(typing, "assert_type"): + assert_type = typing.assert_type + +else: + def assert_type(__val, __typ): + """Assert (to the type checker) that the value is of the given type. + + When the type checker encounters a call to assert_type(), it + emits an error if the value is not of the specified type:: + + def greet(name: str) -> None: + assert_type(name, str) # ok + assert_type(name, int) # type checker error + + At runtime this returns the first argument unchanged and otherwise + does nothing. + """ + return __val + + +if hasattr(typing, "Required"): + get_type_hints = typing.get_type_hints +else: + import functools + import types + + # replaces _strip_annotations() + def _strip_extras(t): + """Strips Annotated, Required and NotRequired from a given type.""" + if isinstance(t, _AnnotatedAlias): + return _strip_extras(t.__origin__) + if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired): + return _strip_extras(t.__args__[0]) + if isinstance(t, typing._GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return t.copy_with(stripped_args) + if hasattr(types, "GenericAlias") and isinstance(t, types.GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return types.GenericAlias(t.__origin__, stripped_args) + if hasattr(types, "UnionType") and isinstance(t, types.UnionType): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return functools.reduce(operator.or_, stripped_args) + + return t + + def get_type_hints(obj, globalns=None, localns=None, include_extras=False): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, adds Optional[t] if a + default value equal to None is set and recursively replaces all + 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' + (unless 'include_extras=True'). + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. 
+ + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + if hasattr(typing, "Annotated"): + hint = typing.get_type_hints( + obj, globalns=globalns, localns=localns, include_extras=True + ) + else: + hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) + if include_extras: + return hint + return {k: _strip_extras(t) for k, t in hint.items()} + + +# Python 3.9+ has PEP 593 (Annotated) +if hasattr(typing, 'Annotated'): + Annotated = typing.Annotated + # Not exported and not a public API, but needed for get_origin() and get_args() + # to work. + _AnnotatedAlias = typing._AnnotatedAlias +# 3.7-3.8 +else: + class _AnnotatedAlias(typing._GenericAlias, _root=True): + """Runtime representation of an annotated type. + + At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't' + with extra annotations. The alias behaves like a normal typing alias, + instantiating is the same as instantiating the underlying type, binding + it to types is also the same. + """ + def __init__(self, origin, metadata): + if isinstance(origin, _AnnotatedAlias): + metadata = origin.__metadata__ + metadata + origin = origin.__origin__ + super().__init__(origin, origin) + self.__metadata__ = metadata + + def copy_with(self, params): + assert len(params) == 1 + new_type = params[0] + return _AnnotatedAlias(new_type, self.__metadata__) + + def __repr__(self): + return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " + f"{', '.join(repr(a) for a in self.__metadata__)}]") + + def __reduce__(self): + return operator.getitem, ( + Annotated, (self.__origin__,) + self.__metadata__ + ) + + def __eq__(self, other): + if not isinstance(other, _AnnotatedAlias): + return NotImplemented + if self.__origin__ != other.__origin__: + return False + return self.__metadata__ == other.__metadata__ + + def __hash__(self): + return hash((self.__origin__, self.__metadata__)) + + class Annotated: + """Add context specific metadata to a type. + + Example: Annotated[int, runtime_check.Unsigned] indicates to the + hypothetical runtime_check module that this type is an unsigned int. + Every other consumer of this type can ignore this metadata and treat + this type as int. + + The first argument to Annotated must be a valid type (and will be in + the __origin__ field), the remaining arguments are kept as a tuple in + the __extra__ field. + + Details: + + - It's an error to call `Annotated` with less than two arguments. + - Nested Annotated are flattened:: + + Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] + + - Instantiating an annotated type is equivalent to instantiating the + underlying type:: + + Annotated[C, Ann1](5) == C(5) + + - Annotated can be used as a generic type alias:: + + Optimized = Annotated[T, runtime.Optimize()] + Optimized[int] == Annotated[int, runtime.Optimize()] + + OptimizedList = Annotated[List[T], runtime.Optimize()] + OptimizedList[int] == Annotated[List[int], runtime.Optimize()] + """ + + __slots__ = () + + def __new__(cls, *args, **kwargs): + raise TypeError("Type Annotated cannot be instantiated.") + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple) or len(params) < 2: + raise TypeError("Annotated[...] 
should be used " + "with at least two arguments (a type and an " + "annotation).") + allowed_special_forms = (ClassVar, Final) + if get_origin(params[0]) in allowed_special_forms: + origin = params[0] + else: + msg = "Annotated[t, ...]: t must be a type." + origin = typing._type_check(params[0], msg) + metadata = tuple(params[1:]) + return _AnnotatedAlias(origin, metadata) + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + f"Cannot subclass {cls.__module__}.Annotated" + ) + +# Python 3.8 has get_origin() and get_args() but those implementations aren't +# Annotated-aware, so we can't use those. Python 3.9's versions don't support +# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. +if sys.version_info[:2] >= (3, 10): + get_origin = typing.get_origin + get_args = typing.get_args +# 3.7-3.9 +else: + try: + # 3.9+ + from typing import _BaseGenericAlias + except ImportError: + _BaseGenericAlias = typing._GenericAlias + try: + # 3.9+ + from typing import GenericAlias as _typing_GenericAlias + except ImportError: + _typing_GenericAlias = typing._GenericAlias + + def get_origin(tp): + """Get the unsubscripted version of a type. + + This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar + and Annotated. Return None for unsupported types. Examples:: + + get_origin(Literal[42]) is Literal + get_origin(int) is None + get_origin(ClassVar[int]) is ClassVar + get_origin(Generic) is Generic + get_origin(Generic[T]) is Generic + get_origin(Union[T, int]) is Union + get_origin(List[Tuple[T, T]][int]) == list + get_origin(P.args) is P + """ + if isinstance(tp, _AnnotatedAlias): + return Annotated + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias, + ParamSpecArgs, ParamSpecKwargs)): + return tp.__origin__ + if tp is typing.Generic: + return typing.Generic + return None + + def get_args(tp): + """Get type arguments with all substitutions performed. + + For unions, basic simplifications used by Union constructor are performed. + Examples:: + get_args(Dict[str, int]) == (str, int) + get_args(int) == () + get_args(Union[int, Union[T, int], str][int]) == (int, str) + get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + get_args(Callable[[], T][int]) == ([], int) + """ + if isinstance(tp, _AnnotatedAlias): + return (tp.__origin__,) + tp.__metadata__ + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)): + if getattr(tp, "_special", False): + return () + res = tp.__args__ + if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: + res = (list(res[:-1]), res[-1]) + return res + return () + + +# 3.10+ +if hasattr(typing, 'TypeAlias'): + TypeAlias = typing.TypeAlias +# 3.9 +elif sys.version_info[:2] >= (3, 9): + class _TypeAliasForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_TypeAliasForm + def TypeAlias(self, parameters): + """Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example above. + """ + raise TypeError(f"{self} is not subscriptable") +# 3.7-3.8 +else: + class _TypeAliasForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' 
+ self._name + + TypeAlias = _TypeAliasForm('TypeAlias', + doc="""Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example + above.""") + + +# Python 3.10+ has PEP 612 +if hasattr(typing, 'ParamSpecArgs'): + ParamSpecArgs = typing.ParamSpecArgs + ParamSpecKwargs = typing.ParamSpecKwargs +# 3.7-3.9 +else: + class _Immutable: + """Mixin to indicate that object should not be copied.""" + __slots__ = () + + def __copy__(self): + return self + + def __deepcopy__(self, memo): + return self + + class ParamSpecArgs(_Immutable): + """The args for a ParamSpec object. + + Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. + + ParamSpecArgs objects have a reference back to their ParamSpec: + + P.args.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. + """ + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return f"{self.__origin__.__name__}.args" + + def __eq__(self, other): + if not isinstance(other, ParamSpecArgs): + return NotImplemented + return self.__origin__ == other.__origin__ + + class ParamSpecKwargs(_Immutable): + """The kwargs for a ParamSpec object. + + Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. + + ParamSpecKwargs objects have a reference back to their ParamSpec: + + P.kwargs.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. + """ + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return f"{self.__origin__.__name__}.kwargs" + + def __eq__(self, other): + if not isinstance(other, ParamSpecKwargs): + return NotImplemented + return self.__origin__ == other.__origin__ + +# 3.10+ +if hasattr(typing, 'ParamSpec'): + ParamSpec = typing.ParamSpec +# 3.7-3.9 +else: + + # Inherits from list as a workaround for Callable checks in Python < 3.9.2. + class ParamSpec(list): + """Parameter specification variable. + + Usage:: + + P = ParamSpec('P') + + Parameter specification variables exist primarily for the benefit of static + type checkers. They are used to forward the parameter types of one + callable to another callable, a pattern commonly found in higher order + functions and decorators. They are only valid when used in ``Concatenate``, + or as the first argument to ``Callable``. In Python 3.10 and higher, + they are also supported in user-defined Generics at runtime. + See class Generic for more information on generic types. An + example for annotating a decorator:: + + T = TypeVar('T') + P = ParamSpec('P') + + def add_logging(f: Callable[P, T]) -> Callable[P, T]: + '''A type-safe decorator to add logging to a function.''' + def inner(*args: P.args, **kwargs: P.kwargs) -> T: + logging.info(f'{f.__name__} was called') + return f(*args, **kwargs) + return inner + + @add_logging + def add_two(x: float, y: float) -> float: + '''Add two numbers together.''' + return x + y + + Parameter specification variables defined with covariant=True or + contravariant=True can be used to declare covariant or contravariant + generic types. These keyword arguments are valid, but their actual semantics + are yet to be decided. See PEP 612 for details. + + Parameter specification variables can be introspected.
e.g.: + + P.__name__ == 'P' + P.__bound__ == None + P.__covariant__ == False + P.__contravariant__ == False + + Note that only parameter specification variables defined in global scope can + be pickled. + """ + + # Trick Generic __parameters__. + __class__ = typing.TypeVar + + @property + def args(self): + return ParamSpecArgs(self) + + @property + def kwargs(self): + return ParamSpecKwargs(self) + + def __init__(self, name, *, bound=None, covariant=False, contravariant=False): + super().__init__([self]) + self.__name__ = name + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + if bound: + self.__bound__ = typing._type_check(bound, 'Bound must be a type.') + else: + self.__bound__ = None + + # for pickling: + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + def __repr__(self): + if self.__covariant__: + prefix = '+' + elif self.__contravariant__: + prefix = '-' + else: + prefix = '~' + return prefix + self.__name__ + + def __hash__(self): + return object.__hash__(self) + + def __eq__(self, other): + return self is other + + def __reduce__(self): + return self.__name__ + + # Hack to get typing._type_check to pass. + def __call__(self, *args, **kwargs): + pass + + +# 3.7-3.9 +if not hasattr(typing, 'Concatenate'): + # Inherits from list as a workaround for Callable checks in Python < 3.9.2. + class _ConcatenateGenericAlias(list): + + # Trick Generic into looking into this for __parameters__. + __class__ = typing._GenericAlias + + # Flag in 3.8. + _special = False + + def __init__(self, origin, args): + super().__init__(args) + self.__origin__ = origin + self.__args__ = args + + def __repr__(self): + _type_repr = typing._type_repr + return (f'{_type_repr(self.__origin__)}' + f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') + + def __hash__(self): + return hash((self.__origin__, self.__args__)) + + # Hack to get typing._type_check to pass in Generic. + def __call__(self, *args, **kwargs): + pass + + @property + def __parameters__(self): + return tuple( + tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) + ) + + +# 3.7-3.9 +@typing._tp_cache +def _concatenate_getitem(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Concatenate of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + if not isinstance(parameters[-1], ParamSpec): + raise TypeError("The last parameter to Concatenate should be a " + "ParamSpec variable.") + msg = "Concatenate[arg, ...]: each arg must be a type." + parameters = tuple(typing._type_check(p, msg) for p in parameters) + return _ConcatenateGenericAlias(self, parameters) + + +# 3.10+ +if hasattr(typing, 'Concatenate'): + Concatenate = typing.Concatenate + _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa +# 3.9 +elif sys.version_info[:2] >= (3, 9): + @_TypeAliasForm + def Concatenate(self, parameters): + """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. + """ + return _concatenate_getitem(self, parameters) +# 3.7-8 +else: + class _ConcatenateForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.'
+ self._name + + def __getitem__(self, parameters): + return _concatenate_getitem(self, parameters) + + Concatenate = _ConcatenateForm( + 'Concatenate', + doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. + """) + +# 3.10+ +if hasattr(typing, 'TypeGuard'): + TypeGuard = typing.TypeGuard +# 3.9 +elif sys.version_info[:2] >= (3, 9): + class _TypeGuardForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_TypeGuardForm + def TypeGuard(self, parameters): + """Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). + """ + item = typing._type_check(parameters, f'{self} accepts only a single type.') + return typing._GenericAlias(self, (item,)) +# 3.7-3.8 +else: + class _TypeGuardForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type') + return typing._GenericAlias(self, (item,)) + + TypeGuard = _TypeGuardForm( + 'TypeGuard', + doc="""Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. 
The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). + """) + + +# Vendored from cpython typing._SpecialFrom +class _SpecialForm(typing._Final, _root=True): + __slots__ = ('_name', '__doc__', '_getitem') + + def __init__(self, getitem): + self._getitem = getitem + self._name = getitem.__name__ + self.__doc__ = getitem.__doc__ + + def __getattr__(self, item): + if item in {'__name__', '__qualname__'}: + return self._name + + raise AttributeError(item) + + def __mro_entries__(self, bases): + raise TypeError(f"Cannot subclass {self!r}") + + def __repr__(self): + return f'typing_extensions.{self._name}' + + def __reduce__(self): + return self._name + + def __call__(self, *args, **kwds): + raise TypeError(f"Cannot instantiate {self!r}") + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + def __instancecheck__(self, obj): + raise TypeError(f"{self} cannot be used with isinstance()") + + def __subclasscheck__(self, cls): + raise TypeError(f"{self} cannot be used with issubclass()") + + @typing._tp_cache + def __getitem__(self, parameters): + return self._getitem(self, parameters) + + +if hasattr(typing, "LiteralString"): + LiteralString = typing.LiteralString +else: + @_SpecialForm + def LiteralString(self, params): + """Represents an arbitrary literal string. + + Example:: + + from typing_extensions import LiteralString + + def query(sql: LiteralString) -> ...: + ... + + query("SELECT * FROM table") # ok + query(f"SELECT * FROM {input()}") # not ok + + See PEP 675 for details. + + """ + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Self"): + Self = typing.Self +else: + @_SpecialForm + def Self(self, params): + """Used to spell the type of "self" in classes. + + Example:: + + from typing import Self + + class ReturnsSelf: + def parse(self, data: bytes) -> Self: + ... + return self + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Never"): + Never = typing.Never +else: + @_SpecialForm + def Never(self, params): + """The bottom type, a type that has no members. 
+ + This can be used to define a function that should never be + called, or a function that never returns:: + + from typing_extensions import Never + + def never_call_me(arg: Never) -> None: + pass + + def int_or_str(arg: int | str) -> None: + never_call_me(arg) # type checker error + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + never_call_me(arg) # ok, arg is of type Never + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, 'Required'): + Required = typing.Required + NotRequired = typing.NotRequired +elif sys.version_info[:2] >= (3, 9): + class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + @_ExtensionsSpecialForm + def Required(self, parameters): + """A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + @_ExtensionsSpecialForm + def NotRequired(self, parameters): + """A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + +else: + class _RequiredForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + Required = _RequiredForm( + 'Required', + doc="""A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """) + NotRequired = _RequiredForm( + 'NotRequired', + doc="""A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """) + + +if hasattr(typing, "Unpack"): # 3.11+ + Unpack = typing.Unpack +elif sys.version_info[:2] >= (3, 9): + class _UnpackSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + @_UnpackSpecialForm + def Unpack(self, parameters): + """A special typing construct to unpack a variadic type. For example: + + Shape = TypeVarTuple('Shape') + Batch = NewType('Batch', int) + + def add_batch_axis( + x: Array[Unpack[Shape]] + ) -> Array[Batch, Unpack[Shape]]: ... 
+ + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return _UnpackAlias(self, (item,)) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + +else: + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + class _UnpackForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return _UnpackAlias(self, (item,)) + + Unpack = _UnpackForm( + 'Unpack', + doc="""A special typing construct to unpack a variadic type. For example: + + Shape = TypeVarTuple('Shape') + Batch = NewType('Batch', int) + + def add_batch_axis( + x: Array[Unpack[Shape]] + ) -> Array[Batch, Unpack[Shape]]: ... + + """) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + + +if hasattr(typing, "TypeVarTuple"): # 3.11+ + TypeVarTuple = typing.TypeVarTuple +else: + class TypeVarTuple: + """Type variable tuple. + + Usage:: + + Ts = TypeVarTuple('Ts') + + In the same way that a normal type variable is a stand-in for a single + type such as ``int``, a type variable *tuple* is a stand-in for a *tuple* + type such as ``Tuple[int, str]``. + + Type variable tuples can be used in ``Generic`` declarations. + Consider the following example:: + + class Array(Generic[*Ts]): ... + + The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``, + where ``T1`` and ``T2`` are type variables. To use these type variables + as type parameters of ``Array``, we must *unpack* the type variable tuple using + the star operator: ``*Ts``. The signature of ``Array`` then behaves + as if we had simply written ``class Array(Generic[T1, T2]): ...``. + In contrast to ``Generic[T1, T2]``, however, ``Generic[*Shape]`` allows + us to parameterise the class with an *arbitrary* number of type parameters. + + Type variable tuples can be used anywhere a normal ``TypeVar`` can. + This includes class definitions, as shown above, as well as function + signatures and variable annotations:: + + class Array(Generic[*Ts]): + + def __init__(self, shape: Tuple[*Ts]): + self._shape: Tuple[*Ts] = shape + + def get_shape(self) -> Tuple[*Ts]: + return self._shape + + shape = (Height(480), Width(640)) + x: Array[Height, Width] = Array(shape) + y = abs(x) # Inferred type is Array[Height, Width] + z = x + x # ... is Array[Height, Width] + x.get_shape() # ... is tuple[Height, Width] + + """ + + # Trick Generic __parameters__. + __class__ = typing.TypeVar + + def __iter__(self): + yield self.__unpacked__ + + def __init__(self, name): + self.__name__ = name + + # for pickling: + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + self.__unpacked__ = Unpack[self] + + def __repr__(self): + return self.__name__ + + def __hash__(self): + return object.__hash__(self) + + def __eq__(self, other): + return self is other + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(self, *args, **kwds): + if '_root' not in kwds: + raise TypeError("Cannot subclass special typing classes") + + +if hasattr(typing, "reveal_type"): + reveal_type = typing.reveal_type +else: + def reveal_type(__obj: T) -> T: + """Reveal the inferred type of a variable. 
+ + When a static type checker encounters a call to ``reveal_type()``, + it will emit the inferred type of the argument:: + + x: int = 1 + reveal_type(x) + + Running a static type checker (e.g., ``mypy``) on this example + will produce output similar to 'Revealed type is "builtins.int"'. + + At runtime, the function prints the runtime type of the + argument and returns it unchanged. + + """ + print(f"Runtime type is {type(__obj).__name__!r}", file=sys.stderr) + return __obj + + +if hasattr(typing, "assert_never"): + assert_never = typing.assert_never +else: + def assert_never(__arg: Never) -> Never: + """Assert to the type checker that a line of code is unreachable. + + Example:: + + def int_or_str(arg: int | str) -> None: + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + assert_never(arg) + + If a type checker finds that a call to assert_never() is + reachable, it will emit an error. + + At runtime, this throws an exception when called. + + """ + raise AssertionError("Expected code to be unreachable") + + +if hasattr(typing, 'dataclass_transform'): + dataclass_transform = typing.dataclass_transform +else: + def dataclass_transform( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_specifiers: typing.Tuple[ + typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], + ... + ] = (), + **kwargs: typing.Any, + ) -> typing.Callable[[T], T]: + """Decorator that marks a function, class, or metaclass as providing + dataclass-like behavior. + + Example: + + from typing_extensions import dataclass_transform + + _T = TypeVar("_T") + + # Used on a decorator function + @dataclass_transform() + def create_model(cls: type[_T]) -> type[_T]: + ... + return cls + + @create_model + class CustomerModel: + id: int + name: str + + # Used on a base class + @dataclass_transform() + class ModelBase: ... + + class CustomerModel(ModelBase): + id: int + name: str + + # Used on a metaclass + @dataclass_transform() + class ModelMeta(type): ... + + class ModelBase(metaclass=ModelMeta): ... + + class CustomerModel(ModelBase): + id: int + name: str + + Each of the ``CustomerModel`` classes defined in this example will now + behave similarly to a dataclass created with the ``@dataclasses.dataclass`` + decorator. For example, the type checker will synthesize an ``__init__`` + method. + + The arguments to this decorator can be used to customize this behavior: + - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be + True or False if it is omitted by the caller. + - ``order_default`` indicates whether the ``order`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``kw_only_default`` indicates whether the ``kw_only`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``field_specifiers`` specifies a static list of supported classes + or functions that describe fields, similar to ``dataclasses.field()``. + + At runtime, this decorator records its arguments in the + ``__dataclass_transform__`` attribute on the decorated object. + + See PEP 681 for details. 
+ + """ + def decorator(cls_or_fn): + cls_or_fn.__dataclass_transform__ = { + "eq_default": eq_default, + "order_default": order_default, + "kw_only_default": kw_only_default, + "field_specifiers": field_specifiers, + "kwargs": kwargs, + } + return cls_or_fn + return decorator + + +# We have to do some monkey patching to deal with the dual nature of +# Unpack/TypeVarTuple: +# - We want Unpack to be a kind of TypeVar so it gets accepted in +# Generic[Unpack[Ts]] +# - We want it to *not* be treated as a TypeVar for the purposes of +# counting generic parameters, so that when we subscript a generic, +# the runtime doesn't try to substitute the Unpack with the subscripted type. +if not hasattr(typing, "TypeVarTuple"): + typing._collect_type_vars = _collect_type_vars + typing._check_generic = _check_generic + + +# Backport typing.NamedTuple as it exists in Python 3.11. +# In 3.11, the ability to define generic `NamedTuple`s was supported. +# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. +if sys.version_info >= (3, 11): + NamedTuple = typing.NamedTuple +else: + def _caller(): + try: + return sys._getframe(2).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): # For platforms without _getframe() + return None + + def _make_nmtuple(name, types, module, defaults=()): + fields = [n for n, t in types] + annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") + for n, t in types} + nm_tpl = collections.namedtuple(name, fields, + defaults=defaults, module=module) + nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations + # The `_field_types` attribute was removed in 3.9; + # in earlier versions, it is the same as the `__annotations__` attribute + if sys.version_info < (3, 9): + nm_tpl._field_types = annotations + return nm_tpl + + _prohibited_namedtuple_fields = typing._prohibited + _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'}) + + class _NamedTupleMeta(type): + def __new__(cls, typename, bases, ns): + assert _NamedTuple in bases + for base in bases: + if base is not _NamedTuple and base is not typing.Generic: + raise TypeError( + 'can only inherit from a NamedTuple type and Generic') + bases = tuple(tuple if base is _NamedTuple else base for base in bases) + types = ns.get('__annotations__', {}) + default_names = [] + for field_name in types: + if field_name in ns: + default_names.append(field_name) + elif default_names: + raise TypeError(f"Non-default namedtuple field {field_name} " + f"cannot follow default field" + f"{'s' if len(default_names) > 1 else ''} " + f"{', '.join(default_names)}") + nm_tpl = _make_nmtuple( + typename, types.items(), + defaults=[ns[n] for n in default_names], + module=ns['__module__'] + ) + nm_tpl.__bases__ = bases + if typing.Generic in bases: + class_getitem = typing.Generic.__class_getitem__.__func__ + nm_tpl.__class_getitem__ = classmethod(class_getitem) + # update from user namespace without overriding special namedtuple attributes + for key in ns: + if key in _prohibited_namedtuple_fields: + raise AttributeError("Cannot overwrite NamedTuple attribute " + key) + elif key not in _special_namedtuple_fields and key not in nm_tpl._fields: + setattr(nm_tpl, key, ns[key]) + if typing.Generic in bases: + nm_tpl.__init_subclass__() + return nm_tpl + + def NamedTuple(__typename, __fields=None, **kwargs): + if __fields is None: + __fields = kwargs.items() + elif kwargs: + raise TypeError("Either list of fields or keywords" + " can be provided to 
NamedTuple, not both") + return _make_nmtuple(__typename, __fields, module=_caller()) + + NamedTuple.__doc__ = typing.NamedTuple.__doc__ + _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) + + # On 3.8+, alter the signature so that it matches typing.NamedTuple. + # The signature of typing.NamedTuple on >=3.8 is invalid syntax in Python 3.7, + # so just leave the signature as it is on 3.7. + if sys.version_info >= (3, 8): + NamedTuple.__text_signature__ = '(typename, fields=None, /, **kwargs)' + + def _namedtuple_mro_entries(bases): + assert NamedTuple in bases + return (_NamedTuple,) + + NamedTuple.__mro_entries__ = _namedtuple_mro_entries diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/METADATA new file mode 100644 index 00000000..e1faca7a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/METADATA @@ -0,0 +1,177 @@ +Metadata-Version: 2.1 +Name: uvicorn +Version: 0.18.3 +Summary: The lightning-fast ASGI server. +Project-URL: Changelog, https://github.com/encode/uvicorn/blob/master/CHANGELOG.md +Project-URL: Funding, https://github.com/sponsors/encode +Project-URL: Homepage, https://www.uvicorn.org/ +Project-URL: Source, https://github.com/encode/uvicorn +Author-email: Tom Christie +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.7 +Requires-Dist: click>=7.0 +Requires-Dist: h11>=0.8 +Requires-Dist: typing-extensions; python_version < '3.8' +Provides-Extra: standard +Requires-Dist: colorama>=0.4; sys_platform == 'win32' and extra == 'standard' +Requires-Dist: httptools>=0.4.0; extra == 'standard' +Requires-Dist: python-dotenv>=0.13; extra == 'standard' +Requires-Dist: pyyaml>=5.1; extra == 'standard' +Requires-Dist: uvloop!=0.15.0,!=0.15.1,>=0.14.0; sys_platform != 'win32' and (sys_platform != 'cygwin' and platform_python_implementation != 'PyPy') and extra == 'standard' +Requires-Dist: watchfiles>=0.13; extra == 'standard' +Requires-Dist: websockets>=10.0; extra == 'standard' +Description-Content-Type: text/markdown + +
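The `typing_extensions` backport that closes above is what lets this bundled Python 3.7 use newer typing constructs such as `TypeGuard`, `Required`/`NotRequired`, and `assert_never`. A minimal usage sketch of those constructs; the `Movie`, `is_str_list`, and `handle` names are illustrative and not part of the vendored files:

```python
# Usage sketch for the backported constructs above (illustrative names).
from typing import List, Union

from typing_extensions import (
    NotRequired, Required, TypedDict, TypeGuard, assert_never,
)

class Movie(TypedDict, total=False):
    title: Required[str]    # must be provided even though total=False
    year: NotRequired[int]  # may be omitted

def is_str_list(val: List[object]) -> TypeGuard[List[str]]:
    # Returning True narrows ``val`` to List[str] for a static checker.
    return all(isinstance(x, str) for x in val)

def handle(arg: Union[int, str]) -> str:
    if isinstance(arg, int):
        return "int"
    if isinstance(arg, str):
        return "str"
    assert_never(arg)  # a checker proves this line is unreachable
```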

+# uvicorn
+
+*An ASGI web server, for Python.*

+ +--- + +[![Build Status](https://github.com/encode/uvicorn/workflows/Test%20Suite/badge.svg)](https://github.com/encode/uvicorn/actions) +[![Package version](https://badge.fury.io/py/uvicorn.svg)](https://pypi.python.org/pypi/uvicorn) + +**Documentation**: [https://www.uvicorn.org](https://www.uvicorn.org) + +**Requirements**: Python 3.7+ (For Python 3.6 support, install version 0.16.0.) + +Uvicorn is an ASGI web server implementation for Python. + +Until recently Python has lacked a minimal low-level server/application interface for +async frameworks. The [ASGI specification][asgi] fills this gap, and means we're now able to +start building a common set of tooling usable across all async frameworks. + +Uvicorn supports HTTP/1.1 and WebSockets. + +## Quickstart + +Install using `pip`: + +```shell +$ pip install uvicorn +``` + +This will install uvicorn with minimal (pure Python) dependencies. + +```shell +$ pip install uvicorn[standard] +``` + +This will install uvicorn with "Cython-based" dependencies (where possible) and other "optional extras". + +In this context, "Cython-based" means the following: + +- the event loop `uvloop` will be installed and used if possible. +- the http protocol will be handled by `httptools` if possible. + +Moreover, "optional extras" means that: + +- the websocket protocol will be handled by `websockets` (should you want to use `wsproto` you'd need to install it manually) if possible. +- the `--reload` flag in development mode will use `watchfiles`. +- windows users will have `colorama` installed for the colored logs. +- `python-dotenv` will be installed should you want to use the `--env-file` option. +- `PyYAML` will be installed to allow you to provide a `.yaml` file to `--log-config`, if desired. + +Create an application, in `example.py`: + +```python +async def app(scope, receive, send): + assert scope['type'] == 'http' + + await send({ + 'type': 'http.response.start', + 'status': 200, + 'headers': [ + [b'content-type', b'text/plain'], + ], + }) + await send({ + 'type': 'http.response.body', + 'body': b'Hello, world!', + }) +``` + +Run the server: + +```shell +$ uvicorn example:app +``` + +--- + +## Why ASGI? + +Most well established Python Web frameworks started out as WSGI-based frameworks. + +WSGI applications are a single, synchronous callable that takes a request and returns a response. +This doesn’t allow for long-lived connections, like you get with long-poll HTTP or WebSocket connections, +which WSGI doesn't support well. + +Having an async concurrency model also allows for options such as lightweight background tasks, +and can be less of a limiting factor for endpoints that have long periods being blocked on network +I/O such as dealing with slow HTTP requests. + +--- + +## Alternative ASGI servers + +A strength of the ASGI protocol is that it decouples the server implementation +from the application framework. This allows for an ecosystem of interoperating +webservers and application frameworks. + +### Daphne + +The first ASGI server implementation, originally developed to power Django Channels, is [the Daphne webserver][daphne]. + +It is run widely in production, and supports HTTP/1.1, HTTP/2, and WebSockets. + +Any of the example applications given here can equally well be run using `daphne` instead. + +``` +$ pip install daphne +$ daphne app:App +``` + +### Hypercorn + +[Hypercorn][hypercorn] was initially part of the Quart web framework, before +being separated out into a standalone ASGI server. 
+ +Hypercorn supports HTTP/1.1, HTTP/2, and WebSockets. + +It also supports [the excellent `trio` async framework][trio], as an alternative to `asyncio`. + +``` +$ pip install hypercorn +$ hypercorn app:App +``` + +### Mangum + +[Mangum][mangum] is an adapter for using ASGI applications with AWS Lambda & API Gateway. + +--- + +
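The Quickstart above launches the app with the `uvicorn` command line; the same thing can be done programmatically through the `run()` helper that `uvicorn/__init__.py` (added later in this diff) re-exports. A minimal sketch, assuming the `example.py` application from the Quickstart:

```python
# Programmatic equivalent of `uvicorn example:app`; the module and app
# names follow the Quickstart example and are otherwise illustrative.
import uvicorn

if __name__ == "__main__":
    uvicorn.run("example:app", host="127.0.0.1", port=8000, log_level="info")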

+*Uvicorn is BSD licensed code.*
+*Designed & crafted with care.*
+
+— 🦄 —

+ +[asgi]: https://asgi.readthedocs.io/en/latest/ +[daphne]: https://github.com/django/daphne +[hypercorn]: https://gitlab.com/pgjones/hypercorn +[mangum]: https://mangum.io +[trio]: https://trio.readthedocs.io diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/RECORD new file mode 100644 index 00000000..c2fc6772 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/RECORD @@ -0,0 +1,88 @@ +../../Scripts/uvicorn.exe,sha256=E98Re0I2Wk-skGSFXgYT7ZfUw2TaGtBjCN9q2BgsI_k,106373 +uvicorn-0.18.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +uvicorn-0.18.3.dist-info/METADATA,sha256=joHuKDTIFuQ9_WH4jhj-ouJojd0Z6eJeVAQcQhj8YLk,6152 +uvicorn-0.18.3.dist-info/RECORD,, +uvicorn-0.18.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +uvicorn-0.18.3.dist-info/WHEEL,sha256=3DSmSyYE1SDERO2-rI3qUbMVovBs7--ggc7mubOemsc,86 +uvicorn-0.18.3.dist-info/entry_points.txt,sha256=FW1w-hkc9QgwaGoovMvm0ZY73w_NcycWdGAUfDsNGxw,46 +uvicorn-0.18.3.dist-info/licenses/LICENSE.md,sha256=7-Gs8-YvuZwoiw7HPlp3O3Jo70Mg_nV-qZQhTktjw3E,1526 +uvicorn/__init__.py,sha256=6HPPD-sOC7GB84mf_MAJVb7NjIhvqgpAT0CC2G6BZ8k,147 +uvicorn/__main__.py,sha256=DQizy6nKP0ywhPpnCHgmRDYIMfcqZKVEzNIWQZjqtVQ,62 +uvicorn/__pycache__/__init__.cpython-37.pyc,, +uvicorn/__pycache__/__main__.cpython-37.pyc,, +uvicorn/__pycache__/_subprocess.cpython-37.pyc,, +uvicorn/__pycache__/_types.cpython-37.pyc,, +uvicorn/__pycache__/config.cpython-37.pyc,, +uvicorn/__pycache__/importer.cpython-37.pyc,, +uvicorn/__pycache__/logging.cpython-37.pyc,, +uvicorn/__pycache__/main.cpython-37.pyc,, +uvicorn/__pycache__/server.cpython-37.pyc,, +uvicorn/__pycache__/workers.cpython-37.pyc,, +uvicorn/_subprocess.py,sha256=zip7kqIlWL_GG7dBnl9dVuzScnjDt97VJJNH5PJFcts,2403 +uvicorn/_types.py,sha256=jysk8Dv1S22qgU9P4pVgLz8Xwz7VO61AGCVcCQr4feY,423 +uvicorn/config.py,sha256=xysHliIRVd9_8aDjRqmnLE06I7Z-0wN3Q1te8Hbvfvk,21498 +uvicorn/importer.py,sha256=aGwb0nygYznrjdopDkcmbyWVHXdZAIounPHPEQ245Hk,1166 +uvicorn/lifespan/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +uvicorn/lifespan/__pycache__/__init__.cpython-37.pyc,, +uvicorn/lifespan/__pycache__/off.cpython-37.pyc,, +uvicorn/lifespan/__pycache__/on.cpython-37.pyc,, +uvicorn/lifespan/off.py,sha256=wjwbgYUYHwTUkMQVOt-wnNSSmEvnrrcgvq6tux_xD2o,232 +uvicorn/lifespan/on.py,sha256=jenqzDjiEuZtRPJJEkE0elFApGZkebbT1vcPMKBBbmQ,5192 +uvicorn/logging.py,sha256=kcqf-KVrfukBZznPCJUP6omqUPW44QgYWW_-aLfVxY4,4397 +uvicorn/loops/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +uvicorn/loops/__pycache__/__init__.cpython-37.pyc,, +uvicorn/loops/__pycache__/asyncio.cpython-37.pyc,, +uvicorn/loops/__pycache__/auto.cpython-37.pyc,, +uvicorn/loops/__pycache__/uvloop.cpython-37.pyc,, +uvicorn/loops/asyncio.py,sha256=-SHjygx4KCRGEKaUi6n2GUSlVPWmYzzxssrx5qvkVMg,327 +uvicorn/loops/auto.py,sha256=BWVq18ce9SoFTo3z5zNW2IU2850u2tRrc6WyK7idsdI,400 +uvicorn/loops/uvloop.py,sha256=K4QybYVxtK9C2emDhDPUCkBXR4XMT5Ofv9BPFPoX0ok,148 +uvicorn/main.py,sha256=dsxr1ziEkqUOx67C3WHB3jy39b4AIAMz5wqjWKV3vco,16473 +uvicorn/middleware/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +uvicorn/middleware/__pycache__/__init__.cpython-37.pyc,, +uvicorn/middleware/__pycache__/asgi2.cpython-37.pyc,, +uvicorn/middleware/__pycache__/debug.cpython-37.pyc,, 
+uvicorn/middleware/__pycache__/message_logger.cpython-37.pyc,, +uvicorn/middleware/__pycache__/proxy_headers.cpython-37.pyc,, +uvicorn/middleware/__pycache__/wsgi.cpython-37.pyc,, +uvicorn/middleware/asgi2.py,sha256=Gve5l1W7_IxinJDLEPk5qmwcoCE7awpe0x-d9Kzt4KY,472 +uvicorn/middleware/debug.py,sha256=_pzNEHRBhbzcJDZuD2HzjvzzTLEROgRTBdIkPoR4W-g,3551 +uvicorn/middleware/message_logger.py,sha256=pLhbeiPxeeH_H7okbd5RxS9x9YcluZlcCICKplHhpsY,2925 +uvicorn/middleware/proxy_headers.py,sha256=x5FdQUFTi5-HjwGmsHMMiIqC3XcgFOCf83mbZOmBlGQ,3072 +uvicorn/middleware/wsgi.py,sha256=QeLhPVC8x8IgK5807Rogbe_Me1wNTW5jN1TjDxRoJgA,6718 +uvicorn/protocols/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +uvicorn/protocols/__pycache__/__init__.cpython-37.pyc,, +uvicorn/protocols/__pycache__/utils.cpython-37.pyc,, +uvicorn/protocols/http/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +uvicorn/protocols/http/__pycache__/__init__.cpython-37.pyc,, +uvicorn/protocols/http/__pycache__/auto.cpython-37.pyc,, +uvicorn/protocols/http/__pycache__/flow_control.cpython-37.pyc,, +uvicorn/protocols/http/__pycache__/h11_impl.cpython-37.pyc,, +uvicorn/protocols/http/__pycache__/httptools_impl.cpython-37.pyc,, +uvicorn/protocols/http/auto.py,sha256=fvYmlgqD3ockeVj13Hhjc3kMWBu8zj2D0npUQcvIraM,391 +uvicorn/protocols/http/flow_control.py,sha256=1lgcydIePi59fo8oqNxxe6wqplMrqFbBJ03yQNyBD7U,1844 +uvicorn/protocols/http/h11_impl.py,sha256=15u79j1Sjn4YAVC61ljgyOOCGDrN7-WIU6NGsnLSQtk,20135 +uvicorn/protocols/http/httptools_impl.py,sha256=ZzBIdR0Vjh0F9VES7XK4BWx7qig44-9_XKvFfIF86x0,21095 +uvicorn/protocols/utils.py,sha256=J8pZr9AfKEM7tlsUb-DSSbs0MzmuGrc26rspYcc_Inc,1876 +uvicorn/protocols/websockets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +uvicorn/protocols/websockets/__pycache__/__init__.cpython-37.pyc,, +uvicorn/protocols/websockets/__pycache__/auto.cpython-37.pyc,, +uvicorn/protocols/websockets/__pycache__/websockets_impl.cpython-37.pyc,, +uvicorn/protocols/websockets/__pycache__/wsproto_impl.cpython-37.pyc,, +uvicorn/protocols/websockets/auto.py,sha256=Fj-Bufny9Syw9cz0MXzZK4ajGkrTSMg1KvwskfWjuyc,539 +uvicorn/protocols/websockets/websockets_impl.py,sha256=FtKQAr9zes5ybwv2Ge5gdypaCGriFoOFr41ZIfCFo1M,12949 +uvicorn/protocols/websockets/wsproto_impl.py,sha256=uGkxOOuMznB0JRbYcH0VgeqQvjc27QONR4WWnEcK4wo,11967 +uvicorn/server.py,sha256=vXi2tQes1en6BnPygvb5WG99Ly4fGe9Rln2mmli7ttI,10792 +uvicorn/supervisors/__init__.py,sha256=YSH0n2BiqyN5m3QaT_QAkS0DkFE2xXHpKDc4ORbh82o,670 +uvicorn/supervisors/__pycache__/__init__.cpython-37.pyc,, +uvicorn/supervisors/__pycache__/basereload.cpython-37.pyc,, +uvicorn/supervisors/__pycache__/multiprocess.cpython-37.pyc,, +uvicorn/supervisors/__pycache__/statreload.cpython-37.pyc,, +uvicorn/supervisors/__pycache__/watchfilesreload.cpython-37.pyc,, +uvicorn/supervisors/__pycache__/watchgodreload.cpython-37.pyc,, +uvicorn/supervisors/basereload.py,sha256=m2oFWDHdnMZWnAAwuor0RY26N95kmX1wboNZhWeRanw,3341 +uvicorn/supervisors/multiprocess.py,sha256=TdLMy1FxpQp4h7L7wox9n3Qs5KrNrEBxsNZFuM7dOJ0,2231 +uvicorn/supervisors/statreload.py,sha256=p4_6gR9wOWRT6k04DBiwtQl6GINwuKdoTZJz6afArT4,1580 +uvicorn/supervisors/watchfilesreload.py,sha256=5gO3lrPgaC6t8qIWANW2diAXZi8pLLY3NbQlsyxBias,2902 +uvicorn/supervisors/watchgodreload.py,sha256=-htNITwVAcSCBo2ZhVlm_skZ19S7uwiQyrJUfvHKr8c,5491 +uvicorn/workers.py,sha256=g8AGeonUl1xMnjXhxLQ2f7bG1LLfApFE7QhKEa9QYNk,3068 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/REQUESTED 
b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/WHEEL b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/WHEEL new file mode 100644 index 00000000..748cda9e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.8.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/entry_points.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/entry_points.txt new file mode 100644 index 00000000..4b00fcb6 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +uvicorn = uvicorn.main:main diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/licenses/LICENSE.md b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/licenses/LICENSE.md new file mode 100644 index 00000000..a6bba145 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn-0.18.3.dist-info/licenses/LICENSE.md @@ -0,0 +1,27 @@ +Copyright © 2017-present, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
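The `entry_points.txt` file above (`uvicorn = uvicorn.main:main`) is what makes pip generate the `../../Scripts/uvicorn.exe` launcher listed in RECORD. On Windows that launcher is a bundled executable, but behaviorally it reduces to roughly the following sketch:

```python
# Rough behavioral sketch of a pip-generated console script for the
# `uvicorn = uvicorn.main:main` entry point; illustrative only.
import sys

from uvicorn.main import main

if __name__ == "__main__":
    sys.exit(main())
```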
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/__init__.py new file mode 100644 index 00000000..62c740ce --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/__init__.py @@ -0,0 +1,5 @@ +from uvicorn.config import Config +from uvicorn.main import Server, main, run + +__version__ = "0.18.3" +__all__ = ["main", "run", "Config", "Server"] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/__main__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/__main__.py new file mode 100644 index 00000000..8a1dc979 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/__main__.py @@ -0,0 +1,4 @@ +import uvicorn + +if __name__ == "__main__": + uvicorn.main() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/_subprocess.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/_subprocess.py new file mode 100644 index 00000000..e05473c7 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/_subprocess.py @@ -0,0 +1,76 @@ +""" +Some light wrappers around Python's multiprocessing, to deal with cleanly +starting child processes. +""" +import multiprocessing +import os +import sys +from multiprocessing.context import SpawnProcess +from socket import socket +from typing import Callable, List, Optional + +from uvicorn.config import Config + +multiprocessing.allow_connection_pickling() +spawn = multiprocessing.get_context("spawn") + + +def get_subprocess( + config: Config, + target: Callable[..., None], + sockets: List[socket], +) -> SpawnProcess: + """ + Called in the parent process, to instantiate a new child process instance. + The child is not yet started at this point. + + * config - The Uvicorn configuration instance. + * target - A callable that accepts a list of sockets. In practice this will + be the `Server.run()` method. + * sockets - A list of sockets to pass to the server. Sockets are bound once + by the parent process, and then passed to the child processes. + """ + # We pass across the stdin fileno, and reopen it in the child process. + # This is required for some debugging environments. + stdin_fileno: Optional[int] + try: + stdin_fileno = sys.stdin.fileno() + except OSError: + stdin_fileno = None + + kwargs = { + "config": config, + "target": target, + "sockets": sockets, + "stdin_fileno": stdin_fileno, + } + + return spawn.Process(target=subprocess_started, kwargs=kwargs) + + +def subprocess_started( + config: Config, + target: Callable[..., None], + sockets: List[socket], + stdin_fileno: Optional[int], +) -> None: + """ + Called when the child process starts. + + * config - The Uvicorn configuration instance. + * target - A callable that accepts a list of sockets. In practice this will + be the `Server.run()` method. + * sockets - A list of sockets to pass to the server. Sockets are bound once + by the parent process, and then passed to the child processes. + * stdin_fileno - The file number of sys.stdin, so that it can be reattached + to the child process. + """ + # Re-open stdin. + if stdin_fileno is not None: + sys.stdin = os.fdopen(stdin_fileno) + + # Logging needs to be setup again for each child. 
+ config.configure_logging() + + # Now we can call into `Server.run(sockets=sockets)` + target(sockets=sockets) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/_types.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/_types.py new file mode 100644 index 00000000..0a547a50 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/_types.py @@ -0,0 +1,14 @@ +import types +import typing + +# WSGI +Environ = typing.MutableMapping[str, typing.Any] +ExcInfo = typing.Tuple[ + typing.Type[BaseException], BaseException, typing.Optional[types.TracebackType] +] +StartResponse = typing.Callable[ + [str, typing.Iterable[typing.Tuple[str, str]], typing.Optional[ExcInfo]], None +] +WSGIApp = typing.Callable[ + [Environ, StartResponse], typing.Union[typing.Iterable[bytes], BaseException] +] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/config.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/config.py new file mode 100644 index 00000000..3cb6bf3d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/config.py @@ -0,0 +1,591 @@ +import asyncio +import inspect +import json +import logging +import logging.config +import os +import socket +import ssl +import sys +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Dict, + List, + Optional, + Tuple, + Type, + Union, +) + +from h11._connection import DEFAULT_MAX_INCOMPLETE_EVENT_SIZE + +from uvicorn.logging import TRACE_LOG_LEVEL + +if sys.version_info < (3, 8): # pragma: py-gte-38 + from typing_extensions import Literal +else: # pragma: py-lt-38 + from typing import Literal + +import click + +try: + import yaml +except ImportError: # pragma: no cover + # If the code below that depends on yaml is exercised, it will raise a NameError. + # Install the PyYAML package or the uvicorn[standard] optional dependencies to + # enable this functionality. 
+ pass + +from uvicorn.importer import ImportFromStringError, import_from_string +from uvicorn.middleware.asgi2 import ASGI2Middleware +from uvicorn.middleware.debug import DebugMiddleware +from uvicorn.middleware.message_logger import MessageLoggerMiddleware +from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware +from uvicorn.middleware.wsgi import WSGIMiddleware + +if TYPE_CHECKING: + from asgiref.typing import ASGIApplication + +HTTPProtocolType = Literal["auto", "h11", "httptools"] +WSProtocolType = Literal["auto", "none", "websockets", "wsproto"] +LifespanType = Literal["auto", "on", "off"] +LoopSetupType = Literal["none", "auto", "asyncio", "uvloop"] +InterfaceType = Literal["auto", "asgi3", "asgi2", "wsgi"] + +LOG_LEVELS: Dict[str, int] = { + "critical": logging.CRITICAL, + "error": logging.ERROR, + "warning": logging.WARNING, + "info": logging.INFO, + "debug": logging.DEBUG, + "trace": TRACE_LOG_LEVEL, +} +HTTP_PROTOCOLS: Dict[HTTPProtocolType, str] = { + "auto": "uvicorn.protocols.http.auto:AutoHTTPProtocol", + "h11": "uvicorn.protocols.http.h11_impl:H11Protocol", + "httptools": "uvicorn.protocols.http.httptools_impl:HttpToolsProtocol", +} +WS_PROTOCOLS: Dict[WSProtocolType, Optional[str]] = { + "auto": "uvicorn.protocols.websockets.auto:AutoWebSocketsProtocol", + "none": None, + "websockets": "uvicorn.protocols.websockets.websockets_impl:WebSocketProtocol", + "wsproto": "uvicorn.protocols.websockets.wsproto_impl:WSProtocol", +} +LIFESPAN: Dict[LifespanType, str] = { + "auto": "uvicorn.lifespan.on:LifespanOn", + "on": "uvicorn.lifespan.on:LifespanOn", + "off": "uvicorn.lifespan.off:LifespanOff", +} +LOOP_SETUPS: Dict[LoopSetupType, Optional[str]] = { + "none": None, + "auto": "uvicorn.loops.auto:auto_loop_setup", + "asyncio": "uvicorn.loops.asyncio:asyncio_setup", + "uvloop": "uvicorn.loops.uvloop:uvloop_setup", +} +INTERFACES: List[InterfaceType] = ["auto", "asgi3", "asgi2", "wsgi"] + + +SSL_PROTOCOL_VERSION: int = ssl.PROTOCOL_TLS_SERVER + + +LOGGING_CONFIG: Dict[str, Any] = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "default": { + "()": "uvicorn.logging.DefaultFormatter", + "fmt": "%(levelprefix)s %(message)s", + "use_colors": None, + }, + "access": { + "()": "uvicorn.logging.AccessFormatter", + "fmt": '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s', # noqa: E501 + }, + }, + "handlers": { + "default": { + "formatter": "default", + "class": "logging.StreamHandler", + "stream": "ext://sys.stderr", + }, + "access": { + "formatter": "access", + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + }, + }, + "loggers": { + "uvicorn": {"handlers": ["default"], "level": "INFO"}, + "uvicorn.error": {"level": "INFO"}, + "uvicorn.access": {"handlers": ["access"], "level": "INFO", "propagate": False}, + }, +} + +logger = logging.getLogger("uvicorn.error") + + +def create_ssl_context( + certfile: Union[str, os.PathLike], + keyfile: Optional[Union[str, os.PathLike]], + password: Optional[str], + ssl_version: int, + cert_reqs: int, + ca_certs: Optional[Union[str, os.PathLike]], + ciphers: Optional[str], +) -> ssl.SSLContext: + ctx = ssl.SSLContext(ssl_version) + get_password = (lambda: password) if password else None + ctx.load_cert_chain(certfile, keyfile, get_password) + ctx.verify_mode = ssl.VerifyMode(cert_reqs) + if ca_certs: + ctx.load_verify_locations(ca_certs) + if ciphers: + ctx.set_ciphers(ciphers) + return ctx + + +def is_dir(path: Path) -> bool: + try: + if not path.is_absolute(): + path = 
path.resolve() + return path.is_dir() + except OSError: + return False + + +def resolve_reload_patterns( + patterns_list: List[str], directories_list: List[str] +) -> Tuple[List[str], List[Path]]: + + directories: List[Path] = list(set(map(Path, directories_list.copy()))) + patterns: List[str] = patterns_list.copy() + + current_working_directory = Path.cwd() + for pattern in patterns_list: + # Special case for the .* pattern, otherwise this would only match + # hidden directories which is probably undesired + if pattern == ".*": + continue + patterns.append(pattern) + if is_dir(Path(pattern)): + directories.append(Path(pattern)) + else: + for match in current_working_directory.glob(pattern): + if is_dir(match): + directories.append(match) + + directories = list(set(directories)) + directories = list(map(Path, directories)) + directories = list(map(lambda x: x.resolve(), directories)) + directories = list( + {reload_path for reload_path in directories if is_dir(reload_path)} + ) + + children = [] + for j in range(len(directories)): + for k in range(j + 1, len(directories)): + if directories[j] in directories[k].parents: + children.append(directories[k]) # pragma: py-darwin + elif directories[k] in directories[j].parents: + children.append(directories[j]) + + directories = list(set(directories).difference(set(children))) + + return list(set(patterns)), directories + + +def _normalize_dirs(dirs: Union[List[str], str, None]) -> List[str]: + if dirs is None: + return [] + if isinstance(dirs, str): + return [dirs] + return list(set(dirs)) + + +class Config: + def __init__( + self, + app: Union["ASGIApplication", Callable, str], + host: str = "127.0.0.1", + port: int = 8000, + uds: Optional[str] = None, + fd: Optional[int] = None, + loop: LoopSetupType = "auto", + http: Union[Type[asyncio.Protocol], HTTPProtocolType] = "auto", + ws: Union[Type[asyncio.Protocol], WSProtocolType] = "auto", + ws_max_size: int = 16 * 1024 * 1024, + ws_ping_interval: Optional[float] = 20.0, + ws_ping_timeout: Optional[float] = 20.0, + ws_per_message_deflate: bool = True, + lifespan: LifespanType = "auto", + env_file: Optional[Union[str, os.PathLike]] = None, + log_config: Optional[Union[Dict[str, Any], str]] = LOGGING_CONFIG, + log_level: Optional[Union[str, int]] = None, + access_log: bool = True, + use_colors: Optional[bool] = None, + interface: InterfaceType = "auto", + debug: bool = False, + reload: bool = False, + reload_dirs: Optional[Union[List[str], str]] = None, + reload_delay: float = 0.25, + reload_includes: Optional[Union[List[str], str]] = None, + reload_excludes: Optional[Union[List[str], str]] = None, + workers: Optional[int] = None, + proxy_headers: bool = True, + server_header: bool = True, + date_header: bool = True, + forwarded_allow_ips: Optional[str] = None, + root_path: str = "", + limit_concurrency: Optional[int] = None, + limit_max_requests: Optional[int] = None, + backlog: int = 2048, + timeout_keep_alive: int = 5, + timeout_notify: int = 30, + callback_notify: Optional[Callable[..., Awaitable[None]]] = None, + ssl_keyfile: Optional[str] = None, + ssl_certfile: Optional[Union[str, os.PathLike]] = None, + ssl_keyfile_password: Optional[str] = None, + ssl_version: int = SSL_PROTOCOL_VERSION, + ssl_cert_reqs: int = ssl.CERT_NONE, + ssl_ca_certs: Optional[str] = None, + ssl_ciphers: str = "TLSv1", + headers: Optional[List[Tuple[str, str]]] = None, + factory: bool = False, + h11_max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE, + ): + self.app = app + self.host = host + self.port 
= port + self.uds = uds + self.fd = fd + self.loop = loop + self.http = http + self.ws = ws + self.ws_max_size = ws_max_size + self.ws_ping_interval = ws_ping_interval + self.ws_ping_timeout = ws_ping_timeout + self.ws_per_message_deflate = ws_per_message_deflate + self.lifespan = lifespan + self.log_config = log_config + self.log_level = log_level + self.access_log = access_log + self.use_colors = use_colors + self.interface = interface + self.debug = debug + self.reload = reload + self.reload_delay = reload_delay + self.workers = workers or 1 + self.proxy_headers = proxy_headers + self.server_header = server_header + self.date_header = date_header + self.root_path = root_path + self.limit_concurrency = limit_concurrency + self.limit_max_requests = limit_max_requests + self.backlog = backlog + self.timeout_keep_alive = timeout_keep_alive + self.timeout_notify = timeout_notify + self.callback_notify = callback_notify + self.ssl_keyfile = ssl_keyfile + self.ssl_certfile = ssl_certfile + self.ssl_keyfile_password = ssl_keyfile_password + self.ssl_version = ssl_version + self.ssl_cert_reqs = ssl_cert_reqs + self.ssl_ca_certs = ssl_ca_certs + self.ssl_ciphers = ssl_ciphers + self.headers: List[Tuple[str, str]] = headers or [] + self.encoded_headers: List[Tuple[bytes, bytes]] = [] + self.factory = factory + self.h11_max_incomplete_event_size = h11_max_incomplete_event_size + + self.loaded = False + self.configure_logging() + + self.reload_dirs: List[Path] = [] + self.reload_dirs_excludes: List[Path] = [] + self.reload_includes: List[str] = [] + self.reload_excludes: List[str] = [] + + if ( + reload_dirs or reload_includes or reload_excludes + ) and not self.should_reload: + logger.warning( + "Current configuration will not reload as not all conditions are met," + "please refer to documentation." 
+ ) + + if self.should_reload: + reload_dirs = _normalize_dirs(reload_dirs) + reload_includes = _normalize_dirs(reload_includes) + reload_excludes = _normalize_dirs(reload_excludes) + + self.reload_includes, self.reload_dirs = resolve_reload_patterns( + reload_includes, reload_dirs + ) + + self.reload_excludes, self.reload_dirs_excludes = resolve_reload_patterns( + reload_excludes, [] + ) + + reload_dirs_tmp = self.reload_dirs.copy() + + for directory in self.reload_dirs_excludes: + for reload_directory in reload_dirs_tmp: + if ( + directory == reload_directory + or directory in reload_directory.parents + ): + try: + self.reload_dirs.remove(reload_directory) + except ValueError: + pass + + for pattern in self.reload_excludes: + if pattern in self.reload_includes: + self.reload_includes.remove(pattern) + + if not self.reload_dirs: + if reload_dirs: + logger.warning( + "Provided reload directories %s did not contain valid " + + "directories, watching current working directory.", + reload_dirs, + ) + self.reload_dirs = [Path(os.getcwd())] + + logger.info( + "Will watch for changes in these directories: %s", + sorted(list(map(str, self.reload_dirs))), + ) + + if env_file is not None: + from dotenv import load_dotenv + + logger.info("Loading environment from '%s'", env_file) + load_dotenv(dotenv_path=env_file) + + if workers is None and "WEB_CONCURRENCY" in os.environ: + self.workers = int(os.environ["WEB_CONCURRENCY"]) + + if forwarded_allow_ips is None: + self.forwarded_allow_ips = os.environ.get( + "FORWARDED_ALLOW_IPS", "127.0.0.1" + ) + else: + self.forwarded_allow_ips = forwarded_allow_ips + + @property + def asgi_version(self) -> Literal["2.0", "3.0"]: + mapping: Dict[str, Literal["2.0", "3.0"]] = { + "asgi2": "2.0", + "asgi3": "3.0", + "wsgi": "3.0", + } + return mapping[self.interface] + + @property + def is_ssl(self) -> bool: + return bool(self.ssl_keyfile or self.ssl_certfile) + + @property + def use_subprocess(self) -> bool: + return bool(self.reload or self.workers > 1) + + def configure_logging(self) -> None: + logging.addLevelName(TRACE_LOG_LEVEL, "TRACE") + + if self.log_config is not None: + if isinstance(self.log_config, dict): + if self.use_colors in (True, False): + self.log_config["formatters"]["default"][ + "use_colors" + ] = self.use_colors + self.log_config["formatters"]["access"][ + "use_colors" + ] = self.use_colors + logging.config.dictConfig(self.log_config) + elif self.log_config.endswith(".json"): + with open(self.log_config) as file: + loaded_config = json.load(file) + logging.config.dictConfig(loaded_config) + elif self.log_config.endswith((".yaml", ".yml")): + with open(self.log_config) as file: + loaded_config = yaml.safe_load(file) + logging.config.dictConfig(loaded_config) + else: + # See the note about fileConfig() here: + # https://docs.python.org/3/library/logging.config.html#configuration-file-format + logging.config.fileConfig( + self.log_config, disable_existing_loggers=False + ) + + if self.log_level is not None: + if isinstance(self.log_level, str): + log_level = LOG_LEVELS[self.log_level] + else: + log_level = self.log_level + logging.getLogger("uvicorn.error").setLevel(log_level) + logging.getLogger("uvicorn.access").setLevel(log_level) + logging.getLogger("uvicorn.asgi").setLevel(log_level) + if self.access_log is False: + logging.getLogger("uvicorn.access").handlers = [] + logging.getLogger("uvicorn.access").propagate = False + + def load(self) -> None: + assert not self.loaded + + if self.is_ssl: + assert self.ssl_certfile + self.ssl: 
Optional[ssl.SSLContext] = create_ssl_context( + keyfile=self.ssl_keyfile, + certfile=self.ssl_certfile, + password=self.ssl_keyfile_password, + ssl_version=self.ssl_version, + cert_reqs=self.ssl_cert_reqs, + ca_certs=self.ssl_ca_certs, + ciphers=self.ssl_ciphers, + ) + else: + self.ssl = None + + encoded_headers = [ + (key.lower().encode("latin1"), value.encode("latin1")) + for key, value in self.headers + ] + self.encoded_headers = ( + [(b"server", b"uvicorn")] + encoded_headers + if b"server" not in dict(encoded_headers) and self.server_header + else encoded_headers + ) + + if isinstance(self.http, str): + http_protocol_class = import_from_string(HTTP_PROTOCOLS[self.http]) + self.http_protocol_class: Type[asyncio.Protocol] = http_protocol_class + else: + self.http_protocol_class = self.http + + if isinstance(self.ws, str): + ws_protocol_class = import_from_string(WS_PROTOCOLS[self.ws]) + self.ws_protocol_class: Optional[Type[asyncio.Protocol]] = ws_protocol_class + else: + self.ws_protocol_class = self.ws + + self.lifespan_class = import_from_string(LIFESPAN[self.lifespan]) + + try: + self.loaded_app = import_from_string(self.app) + except ImportFromStringError as exc: + logger.error("Error loading ASGI app. %s" % exc) + sys.exit(1) + + try: + self.loaded_app = self.loaded_app() + except TypeError as exc: + if self.factory: + logger.error("Error loading ASGI app factory: %s", exc) + sys.exit(1) + else: + if not self.factory: + logger.warning( + "ASGI app factory detected. Using it, " + "but please consider setting the --factory flag explicitly." + ) + + if self.interface == "auto": + if inspect.isclass(self.loaded_app): + use_asgi_3 = hasattr(self.loaded_app, "__await__") + elif inspect.isfunction(self.loaded_app): + use_asgi_3 = asyncio.iscoroutinefunction(self.loaded_app) + else: + call = getattr(self.loaded_app, "__call__", None) + use_asgi_3 = asyncio.iscoroutinefunction(call) + self.interface = "asgi3" if use_asgi_3 else "asgi2" + + if self.interface == "wsgi": + self.loaded_app = WSGIMiddleware(self.loaded_app) + self.ws_protocol_class = None + elif self.interface == "asgi2": + self.loaded_app = ASGI2Middleware(self.loaded_app) + + if self.debug: + self.loaded_app = DebugMiddleware(self.loaded_app) + if logger.level <= TRACE_LOG_LEVEL: + self.loaded_app = MessageLoggerMiddleware(self.loaded_app) + if self.proxy_headers: + self.loaded_app = ProxyHeadersMiddleware( + self.loaded_app, trusted_hosts=self.forwarded_allow_ips + ) + + self.loaded = True + + def setup_event_loop(self) -> None: + loop_setup: Optional[Callable] = import_from_string(LOOP_SETUPS[self.loop]) + if loop_setup is not None: + loop_setup(use_subprocess=self.use_subprocess) + + def bind_socket(self) -> socket.socket: + logger_args: List[Union[str, int]] + if self.uds: # pragma: py-win32 + path = self.uds + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + try: + sock.bind(path) + uds_perms = 0o666 + os.chmod(self.uds, uds_perms) + except OSError as exc: + logger.error(exc) + sys.exit(1) + + message = "Uvicorn running on unix socket %s (Press CTRL+C to quit)" + sock_name_format = "%s" + color_message = ( + "Uvicorn running on " + + click.style(sock_name_format, bold=True) + + " (Press CTRL+C to quit)" + ) + logger_args = [self.uds] + elif self.fd: # pragma: py-win32 + sock = socket.fromfd(self.fd, socket.AF_UNIX, socket.SOCK_STREAM) + message = "Uvicorn running on socket %s (Press CTRL+C to quit)" + fd_name_format = "%s" + color_message = ( + "Uvicorn running on " + + click.style(fd_name_format, bold=True) + 
+ " (Press CTRL+C to quit)" + ) + logger_args = [sock.getsockname()] + else: + family = socket.AF_INET + addr_format = "%s://%s:%d" + + if self.host and ":" in self.host: # pragma: py-win32 + # It's an IPv6 address. + family = socket.AF_INET6 + addr_format = "%s://[%s]:%d" + + sock = socket.socket(family=family) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + try: + sock.bind((self.host, self.port)) + except OSError as exc: + logger.error(exc) + sys.exit(1) + + message = f"Uvicorn running on {addr_format} (Press CTRL+C to quit)" + color_message = ( + "Uvicorn running on " + + click.style(addr_format, bold=True) + + " (Press CTRL+C to quit)" + ) + protocol_name = "https" if self.is_ssl else "http" + logger_args = [protocol_name, self.host, self.port] + logger.info(message, *logger_args, extra={"color_message": color_message}) + sock.set_inheritable(True) + return sock + + @property + def should_reload(self) -> bool: + return isinstance(self.app, str) and (self.debug or self.reload) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/importer.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/importer.py new file mode 100644 index 00000000..e612bf13 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/importer.py @@ -0,0 +1,38 @@ +import importlib +from typing import Any + + +class ImportFromStringError(Exception): + pass + + +def import_from_string(import_str: Any) -> Any: + if not isinstance(import_str, str): + return import_str + + module_str, _, attrs_str = import_str.partition(":") + if not module_str or not attrs_str: + message = ( + 'Import string "{import_str}" must be in format ":".' + ) + raise ImportFromStringError(message.format(import_str=import_str)) + + try: + module = importlib.import_module(module_str) + except ImportError as exc: + if exc.name != module_str: + raise exc from None + message = 'Could not import module "{module_str}".' + raise ImportFromStringError(message.format(module_str=module_str)) + + instance = module + try: + for attr_str in attrs_str.split("."): + instance = getattr(instance, attr_str) + except AttributeError: + message = 'Attribute "{attrs_str}" not found in module "{module_str}".' 
+ raise ImportFromStringError( + message.format(attrs_str=attrs_str, module_str=module_str) + ) + + return instance diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/lifespan/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/lifespan/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/lifespan/off.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/lifespan/off.py new file mode 100644 index 00000000..7ec961b5 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/lifespan/off.py @@ -0,0 +1,12 @@ +from uvicorn import Config + + +class LifespanOff: + def __init__(self, config: Config) -> None: + self.should_exit = False + + async def startup(self) -> None: + pass + + async def shutdown(self) -> None: + pass diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/lifespan/on.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/lifespan/on.py new file mode 100644 index 00000000..0c650aab --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/lifespan/on.py @@ -0,0 +1,137 @@ +import asyncio +import logging +from asyncio import Queue +from typing import TYPE_CHECKING, Union + +from uvicorn import Config + +if TYPE_CHECKING: + from asgiref.typing import ( + LifespanScope, + LifespanShutdownCompleteEvent, + LifespanShutdownEvent, + LifespanShutdownFailedEvent, + LifespanStartupCompleteEvent, + LifespanStartupEvent, + LifespanStartupFailedEvent, + ) + + LifespanReceiveMessage = Union[LifespanStartupEvent, LifespanShutdownEvent] + LifespanSendMessage = Union[ + LifespanStartupFailedEvent, + LifespanShutdownFailedEvent, + LifespanStartupCompleteEvent, + LifespanShutdownCompleteEvent, + ] + + +STATE_TRANSITION_ERROR = "Got invalid state transition on lifespan protocol." + + +class LifespanOn: + def __init__(self, config: Config) -> None: + if not config.loaded: + config.load() + + self.config = config + self.logger = logging.getLogger("uvicorn.error") + self.startup_event = asyncio.Event() + self.shutdown_event = asyncio.Event() + self.receive_queue: "Queue[LifespanReceiveMessage]" = asyncio.Queue() + self.error_occured = False + self.startup_failed = False + self.shutdown_failed = False + self.should_exit = False + + async def startup(self) -> None: + self.logger.info("Waiting for application startup.") + + loop = asyncio.get_event_loop() + main_lifespan_task = loop.create_task(self.main()) # noqa: F841 + # Keep a hard reference to prevent garbage collection + # See https://github.com/encode/uvicorn/pull/972 + startup_event: LifespanStartupEvent = {"type": "lifespan.startup"} + await self.receive_queue.put(startup_event) + await self.startup_event.wait() + + if self.startup_failed or (self.error_occured and self.config.lifespan == "on"): + self.logger.error("Application startup failed. Exiting.") + self.should_exit = True + else: + self.logger.info("Application startup complete.") + + async def shutdown(self) -> None: + if self.error_occured: + return + self.logger.info("Waiting for application shutdown.") + shutdown_event: LifespanShutdownEvent = {"type": "lifespan.shutdown"} + await self.receive_queue.put(shutdown_event) + await self.shutdown_event.wait() + + if self.shutdown_failed or ( + self.error_occured and self.config.lifespan == "on" + ): + self.logger.error("Application shutdown failed. 
Exiting.") + self.should_exit = True + else: + self.logger.info("Application shutdown complete.") + + async def main(self) -> None: + try: + app = self.config.loaded_app + scope: LifespanScope = { + "type": "lifespan", + "asgi": {"version": self.config.asgi_version, "spec_version": "2.0"}, + } + await app(scope, self.receive, self.send) + except BaseException as exc: + self.asgi = None + self.error_occured = True + if self.startup_failed or self.shutdown_failed: + return + if self.config.lifespan == "auto": + msg = "ASGI 'lifespan' protocol appears unsupported." + self.logger.info(msg) + else: + msg = "Exception in 'lifespan' protocol\n" + self.logger.error(msg, exc_info=exc) + finally: + self.startup_event.set() + self.shutdown_event.set() + + async def send(self, message: "LifespanSendMessage") -> None: + assert message["type"] in ( + "lifespan.startup.complete", + "lifespan.startup.failed", + "lifespan.shutdown.complete", + "lifespan.shutdown.failed", + ) + + if message["type"] == "lifespan.startup.complete": + assert not self.startup_event.is_set(), STATE_TRANSITION_ERROR + assert not self.shutdown_event.is_set(), STATE_TRANSITION_ERROR + self.startup_event.set() + + elif message["type"] == "lifespan.startup.failed": + assert not self.startup_event.is_set(), STATE_TRANSITION_ERROR + assert not self.shutdown_event.is_set(), STATE_TRANSITION_ERROR + self.startup_event.set() + self.startup_failed = True + if message.get("message"): + self.logger.error(message["message"]) + + elif message["type"] == "lifespan.shutdown.complete": + assert self.startup_event.is_set(), STATE_TRANSITION_ERROR + assert not self.shutdown_event.is_set(), STATE_TRANSITION_ERROR + self.shutdown_event.set() + + elif message["type"] == "lifespan.shutdown.failed": + assert self.startup_event.is_set(), STATE_TRANSITION_ERROR + assert not self.shutdown_event.is_set(), STATE_TRANSITION_ERROR + self.shutdown_event.set() + self.shutdown_failed = True + if message.get("message"): + self.logger.error(message["message"]) + + async def receive(self) -> "LifespanReceiveMessage": + return await self.receive_queue.get() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/logging.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/logging.py new file mode 100644 index 00000000..b12a710d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/logging.py @@ -0,0 +1,122 @@ +import http +import logging +import sys +from copy import copy +from typing import Optional + +import click + +if sys.version_info < (3, 8): # pragma: py-gte-38 + from typing_extensions import Literal +else: # pragma: py-lt-38 + from typing import Literal + +TRACE_LOG_LEVEL = 5 + + +class ColourizedFormatter(logging.Formatter): + """ + A custom log formatter class that: + + * Outputs the LOG_LEVEL with an appropriate color. + * If a log call includes an `extras={"color_message": ...}` it will be used + for formatting the output, instead of the plain text message. 
+ """ + + level_name_colors = { + TRACE_LOG_LEVEL: lambda level_name: click.style(str(level_name), fg="blue"), + logging.DEBUG: lambda level_name: click.style(str(level_name), fg="cyan"), + logging.INFO: lambda level_name: click.style(str(level_name), fg="green"), + logging.WARNING: lambda level_name: click.style(str(level_name), fg="yellow"), + logging.ERROR: lambda level_name: click.style(str(level_name), fg="red"), + logging.CRITICAL: lambda level_name: click.style( + str(level_name), fg="bright_red" + ), + } + + def __init__( + self, + fmt: Optional[str] = None, + datefmt: Optional[str] = None, + style: Literal["%", "{", "$"] = "%", + use_colors: Optional[bool] = None, + ): + if use_colors in (True, False): + self.use_colors = use_colors + else: + self.use_colors = sys.stdout.isatty() + super().__init__(fmt=fmt, datefmt=datefmt, style=style) + + def color_level_name(self, level_name: str, level_no: int) -> str: + def default(level_name: str) -> str: + return str(level_name) # pragma: no cover + + func = self.level_name_colors.get(level_no, default) + return func(level_name) + + def should_use_colors(self) -> bool: + return True # pragma: no cover + + def formatMessage(self, record: logging.LogRecord) -> str: + recordcopy = copy(record) + levelname = recordcopy.levelname + seperator = " " * (8 - len(recordcopy.levelname)) + if self.use_colors: + levelname = self.color_level_name(levelname, recordcopy.levelno) + if "color_message" in recordcopy.__dict__: + recordcopy.msg = recordcopy.__dict__["color_message"] + recordcopy.__dict__["message"] = recordcopy.getMessage() + recordcopy.__dict__["levelprefix"] = levelname + ":" + seperator + return super().formatMessage(recordcopy) + + +class DefaultFormatter(ColourizedFormatter): + def should_use_colors(self) -> bool: + return sys.stderr.isatty() # pragma: no cover + + +class AccessFormatter(ColourizedFormatter): + status_code_colours = { + 1: lambda code: click.style(str(code), fg="bright_white"), + 2: lambda code: click.style(str(code), fg="green"), + 3: lambda code: click.style(str(code), fg="yellow"), + 4: lambda code: click.style(str(code), fg="red"), + 5: lambda code: click.style(str(code), fg="bright_red"), + } + + def get_status_code(self, status_code: int) -> str: + try: + status_phrase = http.HTTPStatus(status_code).phrase + except ValueError: + status_phrase = "" + status_and_phrase = "%s %s" % (status_code, status_phrase) + if self.use_colors: + + def default(code: int) -> str: + return status_and_phrase # pragma: no cover + + func = self.status_code_colours.get(status_code // 100, default) + return func(status_and_phrase) + return status_and_phrase + + def formatMessage(self, record: logging.LogRecord) -> str: + recordcopy = copy(record) + ( + client_addr, + method, + full_path, + http_version, + status_code, + ) = recordcopy.args # type: ignore[misc] + status_code = self.get_status_code(int(status_code)) # type: ignore[arg-type] + request_line = "%s %s HTTP/%s" % (method, full_path, http_version) + if self.use_colors: + request_line = click.style(request_line, bold=True) + recordcopy.__dict__.update( + { + "client_addr": client_addr, + "request_line": request_line, + "status_code": status_code, + } + ) + return super().formatMessage(recordcopy) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/loops/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/loops/__init__.py new file mode 100644 index 00000000..e69de29b diff --git 
a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/loops/asyncio.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/loops/asyncio.py new file mode 100644 index 00000000..867545f2 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/loops/asyncio.py @@ -0,0 +1,10 @@ +import asyncio +import logging +import sys + +logger = logging.getLogger("uvicorn.error") + + +def asyncio_setup(use_subprocess: bool = False) -> None: # pragma: no cover + if sys.version_info >= (3, 8) and sys.platform == "win32" and use_subprocess: + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/loops/auto.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/loops/auto.py new file mode 100644 index 00000000..2285457b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/loops/auto.py @@ -0,0 +1,11 @@ +def auto_loop_setup(use_subprocess: bool = False) -> None: + try: + import uvloop # noqa + except ImportError: # pragma: no cover + from uvicorn.loops.asyncio import asyncio_setup as loop_setup + + loop_setup(use_subprocess=use_subprocess) + else: # pragma: no cover + from uvicorn.loops.uvloop import uvloop_setup + + uvloop_setup(use_subprocess=use_subprocess) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/loops/uvloop.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/loops/uvloop.py new file mode 100644 index 00000000..0e2fd1eb --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/loops/uvloop.py @@ -0,0 +1,7 @@ +import asyncio + +import uvloop + + +def uvloop_setup(use_subprocess: bool = False) -> None: + asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/main.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/main.py new file mode 100644 index 00000000..f4318ee6 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/main.py @@ -0,0 +1,585 @@ +import asyncio +import logging +import os +import platform +import ssl +import sys +import typing + +import click +from h11._connection import DEFAULT_MAX_INCOMPLETE_EVENT_SIZE + +import uvicorn +from uvicorn.config import ( + HTTP_PROTOCOLS, + INTERFACES, + LIFESPAN, + LOG_LEVELS, + LOGGING_CONFIG, + LOOP_SETUPS, + SSL_PROTOCOL_VERSION, + WS_PROTOCOLS, + Config, + HTTPProtocolType, + InterfaceType, + LifespanType, + LoopSetupType, + WSProtocolType, +) +from uvicorn.server import Server, ServerState # noqa: F401 # Used to be defined here. 
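+# Programmatic use mirrors the CLI defined below. A minimal sketch (the
+# "example:app" import string is illustrative, not part of this file):
+#
+#     uvicorn.run("example:app", host="127.0.0.1", port=8000, log_level="info")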
+from uvicorn.supervisors import ChangeReload, Multiprocess + +if typing.TYPE_CHECKING: + from asgiref.typing import ASGIApplication + +LEVEL_CHOICES = click.Choice(list(LOG_LEVELS.keys())) +HTTP_CHOICES = click.Choice(list(HTTP_PROTOCOLS.keys())) +WS_CHOICES = click.Choice(list(WS_PROTOCOLS.keys())) +LIFESPAN_CHOICES = click.Choice(list(LIFESPAN.keys())) +LOOP_CHOICES = click.Choice([key for key in LOOP_SETUPS.keys() if key != "none"]) +INTERFACE_CHOICES = click.Choice(INTERFACES) + +STARTUP_FAILURE = 3 + +logger = logging.getLogger("uvicorn.error") + + +def print_version(ctx: click.Context, param: click.Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + click.echo( + "Running uvicorn %s with %s %s on %s" + % ( + uvicorn.__version__, + platform.python_implementation(), + platform.python_version(), + platform.system(), + ) + ) + ctx.exit() + + +@click.command(context_settings={"auto_envvar_prefix": "UVICORN"}) +@click.argument("app") +@click.option( + "--host", + type=str, + default="127.0.0.1", + help="Bind socket to this host.", + show_default=True, +) +@click.option( + "--port", + type=int, + default=8000, + help="Bind socket to this port.", + show_default=True, +) +@click.option("--uds", type=str, default=None, help="Bind to a UNIX domain socket.") +@click.option( + "--fd", type=int, default=None, help="Bind to socket from this file descriptor." +) +@click.option( + "--debug", is_flag=True, default=False, help="Enable debug mode.", hidden=True +) +@click.option("--reload", is_flag=True, default=False, help="Enable auto-reload.") +@click.option( + "--reload-dir", + "reload_dirs", + multiple=True, + help="Set reload directories explicitly, instead of using the current working" + " directory.", + type=click.Path(exists=True), +) +@click.option( + "--reload-include", + "reload_includes", + multiple=True, + help="Set glob patterns to include while watching for files. Includes '*.py' " + "by default; these defaults can be overridden with `--reload-exclude`. " + "This option has no effect unless watchfiles is installed.", +) +@click.option( + "--reload-exclude", + "reload_excludes", + multiple=True, + help="Set glob patterns to exclude while watching for files. Includes " + "'.*, .py[cod], .sw.*, ~*' by default; these defaults can be overridden " + "with `--reload-include`. This option has no effect unless watchfiles is " + "installed.", +) +@click.option( + "--reload-delay", + type=float, + default=0.25, + show_default=True, + help="Delay between previous and next check if application needs to be." + " Defaults to 0.25s.", +) +@click.option( + "--workers", + default=None, + type=int, + help="Number of worker processes. Defaults to the $WEB_CONCURRENCY environment" + " variable if available, or 1. 
Not valid with --reload.", +) +@click.option( + "--loop", + type=LOOP_CHOICES, + default="auto", + help="Event loop implementation.", + show_default=True, +) +@click.option( + "--http", + type=HTTP_CHOICES, + default="auto", + help="HTTP protocol implementation.", + show_default=True, +) +@click.option( + "--ws", + type=WS_CHOICES, + default="auto", + help="WebSocket protocol implementation.", + show_default=True, +) +@click.option( + "--ws-max-size", + type=int, + default=16777216, + help="WebSocket max size message in bytes", + show_default=True, +) +@click.option( + "--ws-ping-interval", + type=float, + default=20.0, + help="WebSocket ping interval", + show_default=True, +) +@click.option( + "--ws-ping-timeout", + type=float, + default=20.0, + help="WebSocket ping timeout", + show_default=True, +) +@click.option( + "--ws-per-message-deflate", + type=bool, + default=True, + help="WebSocket per-message-deflate compression", + show_default=True, +) +@click.option( + "--lifespan", + type=LIFESPAN_CHOICES, + default="auto", + help="Lifespan implementation.", + show_default=True, +) +@click.option( + "--interface", + type=INTERFACE_CHOICES, + default="auto", + help="Select ASGI3, ASGI2, or WSGI as the application interface.", + show_default=True, +) +@click.option( + "--env-file", + type=click.Path(exists=True), + default=None, + help="Environment configuration file.", + show_default=True, +) +@click.option( + "--log-config", + type=click.Path(exists=True), + default=None, + help="Logging configuration file. Supported formats: .ini, .json, .yaml.", + show_default=True, +) +@click.option( + "--log-level", + type=LEVEL_CHOICES, + default=None, + help="Log level. [default: info]", + show_default=True, +) +@click.option( + "--access-log/--no-access-log", + is_flag=True, + default=True, + help="Enable/Disable access log.", +) +@click.option( + "--use-colors/--no-use-colors", + is_flag=True, + default=None, + help="Enable/Disable colorized logging.", +) +@click.option( + "--proxy-headers/--no-proxy-headers", + is_flag=True, + default=True, + help="Enable/Disable X-Forwarded-Proto, X-Forwarded-For, X-Forwarded-Port to " + "populate remote address info.", +) +@click.option( + "--server-header/--no-server-header", + is_flag=True, + default=True, + help="Enable/Disable default Server header.", +) +@click.option( + "--date-header/--no-date-header", + is_flag=True, + default=True, + help="Enable/Disable default Date header.", +) +@click.option( + "--forwarded-allow-ips", + type=str, + default=None, + help="Comma separated list of IPs to trust with proxy headers. 
Defaults to" + " the $FORWARDED_ALLOW_IPS environment variable if available, or '127.0.0.1'.", +) +@click.option( + "--root-path", + type=str, + default="", + help="Set the ASGI 'root_path' for applications submounted below a given URL path.", +) +@click.option( + "--limit-concurrency", + type=int, + default=None, + help="Maximum number of concurrent connections or tasks to allow, before issuing" + " HTTP 503 responses.", +) +@click.option( + "--backlog", + type=int, + default=2048, + help="Maximum number of connections to hold in backlog", +) +@click.option( + "--limit-max-requests", + type=int, + default=None, + help="Maximum number of requests to service before terminating the process.", +) +@click.option( + "--timeout-keep-alive", + type=int, + default=5, + help="Close Keep-Alive connections if no new data is received within this timeout.", + show_default=True, +) +@click.option( + "--ssl-keyfile", type=str, default=None, help="SSL key file", show_default=True +) +@click.option( + "--ssl-certfile", + type=str, + default=None, + help="SSL certificate file", + show_default=True, +) +@click.option( + "--ssl-keyfile-password", + type=str, + default=None, + help="SSL keyfile password", + show_default=True, +) +@click.option( + "--ssl-version", + type=int, + default=int(SSL_PROTOCOL_VERSION), + help="SSL version to use (see stdlib ssl module's)", + show_default=True, +) +@click.option( + "--ssl-cert-reqs", + type=int, + default=int(ssl.CERT_NONE), + help="Whether client certificate is required (see stdlib ssl module's)", + show_default=True, +) +@click.option( + "--ssl-ca-certs", + type=str, + default=None, + help="CA certificates file", + show_default=True, +) +@click.option( + "--ssl-ciphers", + type=str, + default="TLSv1", + help="Ciphers to use (see stdlib ssl module's)", + show_default=True, +) +@click.option( + "--header", + "headers", + multiple=True, + help="Specify custom default HTTP response headers as a Name:Value pair", +) +@click.option( + "--version", + is_flag=True, + callback=print_version, + expose_value=False, + is_eager=True, + help="Display the uvicorn version and exit.", +) +@click.option( + "--app-dir", + default=".", + show_default=True, + help="Look for APP in the specified directory, by adding this to the PYTHONPATH." + " Defaults to the current working directory.", +) +@click.option( + "--h11-max-incomplete-event-size", + "h11_max_incomplete_event_size", + type=int, + default=DEFAULT_MAX_INCOMPLETE_EVENT_SIZE, + help="For h11, the maximum number of bytes to buffer of an incomplete event.", +) +@click.option( + "--factory", + is_flag=True, + default=False, + help="Treat APP as an application factory, i.e. 
a () -> callable.", + show_default=True, +) +def main( + app: str, + host: str, + port: int, + uds: str, + fd: int, + loop: LoopSetupType, + http: HTTPProtocolType, + ws: WSProtocolType, + ws_max_size: int, + ws_ping_interval: float, + ws_ping_timeout: float, + ws_per_message_deflate: bool, + lifespan: LifespanType, + interface: InterfaceType, + debug: bool, + reload: bool, + reload_dirs: typing.List[str], + reload_includes: typing.List[str], + reload_excludes: typing.List[str], + reload_delay: float, + workers: int, + env_file: str, + log_config: str, + log_level: str, + access_log: bool, + proxy_headers: bool, + server_header: bool, + date_header: bool, + forwarded_allow_ips: str, + root_path: str, + limit_concurrency: int, + backlog: int, + limit_max_requests: int, + timeout_keep_alive: int, + ssl_keyfile: str, + ssl_certfile: str, + ssl_keyfile_password: str, + ssl_version: int, + ssl_cert_reqs: int, + ssl_ca_certs: str, + ssl_ciphers: str, + headers: typing.List[str], + use_colors: bool, + app_dir: str, + h11_max_incomplete_event_size: int, + factory: bool, +) -> None: + run( + app, + host=host, + port=port, + uds=uds, + fd=fd, + loop=loop, + http=http, + ws=ws, + ws_max_size=ws_max_size, + ws_ping_interval=ws_ping_interval, + ws_ping_timeout=ws_ping_timeout, + ws_per_message_deflate=ws_per_message_deflate, + lifespan=lifespan, + env_file=env_file, + log_config=LOGGING_CONFIG if log_config is None else log_config, + log_level=log_level, + access_log=access_log, + interface=interface, + debug=debug, + reload=reload, + reload_dirs=reload_dirs or None, + reload_includes=reload_includes or None, + reload_excludes=reload_excludes or None, + reload_delay=reload_delay, + workers=workers, + proxy_headers=proxy_headers, + server_header=server_header, + date_header=date_header, + forwarded_allow_ips=forwarded_allow_ips, + root_path=root_path, + limit_concurrency=limit_concurrency, + backlog=backlog, + limit_max_requests=limit_max_requests, + timeout_keep_alive=timeout_keep_alive, + ssl_keyfile=ssl_keyfile, + ssl_certfile=ssl_certfile, + ssl_keyfile_password=ssl_keyfile_password, + ssl_version=ssl_version, + ssl_cert_reqs=ssl_cert_reqs, + ssl_ca_certs=ssl_ca_certs, + ssl_ciphers=ssl_ciphers, + headers=[header.split(":", 1) for header in headers], # type: ignore[misc] + use_colors=use_colors, + factory=factory, + app_dir=app_dir, + h11_max_incomplete_event_size=h11_max_incomplete_event_size, + ) + + +def run( + app: typing.Union["ASGIApplication", typing.Callable, str], + *, + host: str = "127.0.0.1", + port: int = 8000, + uds: typing.Optional[str] = None, + fd: typing.Optional[int] = None, + loop: LoopSetupType = "auto", + http: typing.Union[typing.Type[asyncio.Protocol], HTTPProtocolType] = "auto", + ws: typing.Union[typing.Type[asyncio.Protocol], WSProtocolType] = "auto", + ws_max_size: int = 16777216, + ws_ping_interval: typing.Optional[float] = 20.0, + ws_ping_timeout: typing.Optional[float] = 20.0, + ws_per_message_deflate: bool = True, + lifespan: LifespanType = "auto", + interface: InterfaceType = "auto", + debug: bool = False, + reload: bool = False, + reload_dirs: typing.Optional[typing.Union[typing.List[str], str]] = None, + reload_includes: typing.Optional[typing.Union[typing.List[str], str]] = None, + reload_excludes: typing.Optional[typing.Union[typing.List[str], str]] = None, + reload_delay: float = 0.25, + workers: typing.Optional[int] = None, + env_file: typing.Optional[typing.Union[str, os.PathLike]] = None, + log_config: typing.Optional[ + typing.Union[typing.Dict[str, 
typing.Any], str] + ] = LOGGING_CONFIG, + log_level: typing.Optional[typing.Union[str, int]] = None, + access_log: bool = True, + proxy_headers: bool = True, + server_header: bool = True, + date_header: bool = True, + forwarded_allow_ips: typing.Optional[str] = None, + root_path: str = "", + limit_concurrency: typing.Optional[int] = None, + backlog: int = 2048, + limit_max_requests: typing.Optional[int] = None, + timeout_keep_alive: int = 5, + ssl_keyfile: typing.Optional[str] = None, + ssl_certfile: typing.Optional[typing.Union[str, os.PathLike]] = None, + ssl_keyfile_password: typing.Optional[str] = None, + ssl_version: int = SSL_PROTOCOL_VERSION, + ssl_cert_reqs: int = ssl.CERT_NONE, + ssl_ca_certs: typing.Optional[str] = None, + ssl_ciphers: str = "TLSv1", + headers: typing.Optional[typing.List[typing.Tuple[str, str]]] = None, + use_colors: typing.Optional[bool] = None, + app_dir: typing.Optional[str] = None, + factory: bool = False, + h11_max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE, +) -> None: + if app_dir is not None: + sys.path.insert(0, app_dir) + + config = Config( + app, + host=host, + port=port, + uds=uds, + fd=fd, + loop=loop, + http=http, + ws=ws, + ws_max_size=ws_max_size, + ws_ping_interval=ws_ping_interval, + ws_ping_timeout=ws_ping_timeout, + ws_per_message_deflate=ws_per_message_deflate, + lifespan=lifespan, + interface=interface, + debug=debug, + reload=reload, + reload_dirs=reload_dirs, + reload_includes=reload_includes, + reload_excludes=reload_excludes, + reload_delay=reload_delay, + workers=workers, + env_file=env_file, + log_config=log_config, + log_level=log_level, + access_log=access_log, + proxy_headers=proxy_headers, + server_header=server_header, + date_header=date_header, + forwarded_allow_ips=forwarded_allow_ips, + root_path=root_path, + limit_concurrency=limit_concurrency, + backlog=backlog, + limit_max_requests=limit_max_requests, + timeout_keep_alive=timeout_keep_alive, + ssl_keyfile=ssl_keyfile, + ssl_certfile=ssl_certfile, + ssl_keyfile_password=ssl_keyfile_password, + ssl_version=ssl_version, + ssl_cert_reqs=ssl_cert_reqs, + ssl_ca_certs=ssl_ca_certs, + ssl_ciphers=ssl_ciphers, + headers=headers, + use_colors=use_colors, + factory=factory, + h11_max_incomplete_event_size=h11_max_incomplete_event_size, + ) + server = Server(config=config) + + if (config.reload or config.workers > 1) and not isinstance(app, str): + logger = logging.getLogger("uvicorn.error") + logger.warning( + "You must pass the application as an import string to enable 'reload' or " + "'workers'." 
+ ) + sys.exit(1) + + if config.should_reload: + sock = config.bind_socket() + ChangeReload(config, target=server.run, sockets=[sock]).run() + elif config.workers > 1: + sock = config.bind_socket() + Multiprocess(config, target=server.run, sockets=[sock]).run() + else: + server.run() + if config.uds: + os.remove(config.uds) # pragma: py-win32 + + if not server.started and not config.should_reload and config.workers == 1: + sys.exit(STARTUP_FAILURE) + + +if __name__ == "__main__": + main() # pragma: no cover diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/asgi2.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/asgi2.py new file mode 100644 index 00000000..c92b6c8f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/asgi2.py @@ -0,0 +1,20 @@ +import typing + +if typing.TYPE_CHECKING: + from asgiref.typing import ( + ASGI2Application, + ASGIReceiveCallable, + ASGISendCallable, + Scope, + ) + + +class ASGI2Middleware: + def __init__(self, app: "ASGI2Application"): + self.app = app + + async def __call__( + self, scope: "Scope", receive: "ASGIReceiveCallable", send: "ASGISendCallable" + ) -> None: + instance = self.app(scope) + await instance(receive, send) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/debug.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/debug.py new file mode 100644 index 00000000..f94b5f67 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/debug.py @@ -0,0 +1,122 @@ +import html +import traceback +from typing import TYPE_CHECKING, Union + +if TYPE_CHECKING: + from asgiref.typing import ( + ASGI3Application, + ASGIReceiveCallable, + ASGISendCallable, + ASGISendEvent, + HTTPResponseBodyEvent, + HTTPResponseStartEvent, + WWWScope, + ) + + +class HTMLResponse: + def __init__(self, content: str, status_code: int): + self.content = content + self.status_code = status_code + + async def __call__( + self, + scope: "WWWScope", + receive: "ASGIReceiveCallable", + send: "ASGISendCallable", + ) -> None: + response_start: "HTTPResponseStartEvent" = { + "type": "http.response.start", + "status": self.status_code, + "headers": [(b"content-type", b"text/html; charset=utf-8")], + } + await send(response_start) + + response_body: "HTTPResponseBodyEvent" = { + "type": "http.response.body", + "body": self.content.encode("utf-8"), + "more_body": False, + } + await send(response_body) + + +class PlainTextResponse: + def __init__(self, content: str, status_code: int): + self.content = content + self.status_code = status_code + + async def __call__( + self, + scope: "WWWScope", + receive: "ASGIReceiveCallable", + send: "ASGISendCallable", + ) -> None: + response_start: "HTTPResponseStartEvent" = { + "type": "http.response.start", + "status": self.status_code, + "headers": [(b"content-type", b"text/plain; charset=utf-8")], + } + await send(response_start) + + response_body: "HTTPResponseBodyEvent" = { + "type": "http.response.body", + "body": self.content.encode("utf-8"), + "more_body": False, + } + await send(response_body) + + +def get_accept_header(scope: "WWWScope") -> str: + accept = "*/*" + + for key, value in 
scope.get("headers", []): + if key == b"accept": + accept = value.decode("ascii") + break + + return accept + + +class DebugMiddleware: + def __init__(self, app: "ASGI3Application"): + self.app = app + + async def __call__( + self, + scope: "WWWScope", + receive: "ASGIReceiveCallable", + send: "ASGISendCallable", + ) -> None: + if scope["type"] != "http": + return await self.app(scope, receive, send) + + response_started = False + + async def inner_send(message: "ASGISendEvent") -> None: + nonlocal response_started, send + + if message["type"] == "http.response.start": + response_started = True + await send(message) + + try: + await self.app(scope, receive, inner_send) + except BaseException as exc: + if response_started: + raise exc from None + + accept = get_accept_header(scope) + response: Union[HTMLResponse, PlainTextResponse] + if "text/html" in accept: + exc_html = html.escape(traceback.format_exc()) + content = ( + "
<html><body><h1>500 Server Error</h1><pre>%s</pre></body></html>
" + % exc_html + ) + response = HTMLResponse(content, status_code=500) + else: + content = traceback.format_exc() + response = PlainTextResponse(content, status_code=500) + + await response(scope, receive, send) + raise exc from None diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/message_logger.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/message_logger.py new file mode 100644 index 00000000..e3d7a572 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/message_logger.py @@ -0,0 +1,89 @@ +import logging +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from asgiref.typing import ( + ASGI3Application, + ASGIReceiveCallable, + ASGIReceiveEvent, + ASGISendCallable, + ASGISendEvent, + WWWScope, + ) + +from uvicorn.logging import TRACE_LOG_LEVEL + +PLACEHOLDER_FORMAT = { + "body": "<{length} bytes>", + "bytes": "<{length} bytes>", + "text": "<{length} chars>", + "headers": "<...>", +} + + +def message_with_placeholders(message: Any) -> Any: + """ + Return an ASGI message, with any body-type content omitted and replaced + with a placeholder. + """ + new_message = message.copy() + for attr in PLACEHOLDER_FORMAT.keys(): + if message.get(attr) is not None: + content = message[attr] + placeholder = PLACEHOLDER_FORMAT[attr].format(length=len(content)) + new_message[attr] = placeholder + return new_message + + +class MessageLoggerMiddleware: + def __init__(self, app: "ASGI3Application"): + self.task_counter = 0 + self.app = app + self.logger = logging.getLogger("uvicorn.asgi") + + def trace(message: Any, *args: Any, **kwargs: Any) -> None: + self.logger.log(TRACE_LOG_LEVEL, message, *args, **kwargs) + + self.logger.trace = trace # type: ignore + + async def __call__( + self, + scope: "WWWScope", + receive: "ASGIReceiveCallable", + send: "ASGISendCallable", + ) -> None: + self.task_counter += 1 + + task_counter = self.task_counter + client = scope.get("client") + prefix = "%s:%d - ASGI" % (client[0], client[1]) if client else "ASGI" + + async def inner_receive() -> "ASGIReceiveEvent": + message = await receive() + logged_message = message_with_placeholders(message) + log_text = "%s [%d] Receive %s" + self.logger.trace( # type: ignore + log_text, prefix, task_counter, logged_message + ) + return message + + async def inner_send(message: "ASGISendEvent") -> None: + logged_message = message_with_placeholders(message) + log_text = "%s [%d] Send %s" + self.logger.trace( # type: ignore + log_text, prefix, task_counter, logged_message + ) + await send(message) + + logged_scope = message_with_placeholders(scope) + log_text = "%s [%d] Started scope=%s" + self.logger.trace(log_text, prefix, task_counter, logged_scope) # type: ignore + try: + await self.app(scope, inner_receive, inner_send) + except BaseException as exc: + log_text = "%s [%d] Raised exception" + self.logger.trace(log_text, prefix, task_counter) # type: ignore + raise exc from None + else: + log_text = "%s [%d] Completed" + self.logger.trace(log_text, prefix, task_counter) # type: ignore diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/proxy_headers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/proxy_headers.py new file mode 100644 index 00000000..4b62cf20 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/proxy_headers.py @@ -0,0 +1,78 @@ +""" +This middleware can be used when a known proxy 
is fronting the application, +and is trusted to be properly setting the `X-Forwarded-Proto` and +`X-Forwarded-For` headers with the connecting client information. + +Modifies the `client` and `scheme` information so that they reference +the connecting client, rather that the connecting proxy. + +https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers#Proxies +""" +from typing import TYPE_CHECKING, List, Optional, Tuple, Union, cast + +if TYPE_CHECKING: + from asgiref.typing import ( + ASGI3Application, + ASGIReceiveCallable, + ASGISendCallable, + HTTPScope, + Scope, + WebSocketScope, + ) + + +class ProxyHeadersMiddleware: + def __init__( + self, + app: "ASGI3Application", + trusted_hosts: Union[List[str], str] = "127.0.0.1", + ) -> None: + self.app = app + if isinstance(trusted_hosts, str): + self.trusted_hosts = {item.strip() for item in trusted_hosts.split(",")} + else: + self.trusted_hosts = set(trusted_hosts) + self.always_trust = "*" in self.trusted_hosts + + def get_trusted_client_host( + self, x_forwarded_for_hosts: List[str] + ) -> Optional[str]: + if self.always_trust: + return x_forwarded_for_hosts[0] + + for host in reversed(x_forwarded_for_hosts): + if host not in self.trusted_hosts: + return host + + return None + + async def __call__( + self, scope: "Scope", receive: "ASGIReceiveCallable", send: "ASGISendCallable" + ) -> None: + if scope["type"] in ("http", "websocket"): + scope = cast(Union["HTTPScope", "WebSocketScope"], scope) + client_addr: Optional[Tuple[str, int]] = scope.get("client") + client_host = client_addr[0] if client_addr else None + + if self.always_trust or client_host in self.trusted_hosts: + headers = dict(scope["headers"]) + + if b"x-forwarded-proto" in headers: + # Determine if the incoming request was http or https based on + # the X-Forwarded-Proto header. + x_forwarded_proto = headers[b"x-forwarded-proto"].decode("latin1") + scope["scheme"] = x_forwarded_proto.strip() # type: ignore[index] + + if b"x-forwarded-for" in headers: + # Determine the client address from the last trusted IP in the + # X-Forwarded-For header. We've lost the connecting client's port + # information by now, so only include the host. + x_forwarded_for = headers[b"x-forwarded-for"].decode("latin1") + x_forwarded_for_hosts = [ + item.strip() for item in x_forwarded_for.split(",") + ] + host = self.get_trusted_client_host(x_forwarded_for_hosts) + port = 0 + scope["client"] = (host, port) # type: ignore[arg-type] + + return await self.app(scope, receive, send) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/wsgi.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/wsgi.py new file mode 100644 index 00000000..9a79a3ba --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/middleware/wsgi.py @@ -0,0 +1,190 @@ +import asyncio +import concurrent.futures +import io +import sys +from collections import deque +from typing import TYPE_CHECKING, Deque, Iterable, Optional, Tuple + +if TYPE_CHECKING: + from asgiref.typing import ( + ASGIReceiveCallable, + ASGIReceiveEvent, + ASGISendCallable, + ASGISendEvent, + HTTPRequestEvent, + HTTPResponseBodyEvent, + HTTPResponseStartEvent, + HTTPScope, + ) + +from uvicorn._types import Environ, ExcInfo, StartResponse, WSGIApp + + +def build_environ( + scope: "HTTPScope", message: "ASGIReceiveEvent", body: io.BytesIO +) -> Environ: + """ + Builds a scope and request message into a WSGI environ object. 
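+ For example, a GET request for /a?b=c over HTTP/1.1 maps to
+ REQUEST_METHOD='GET', PATH_INFO='/a', QUERY_STRING='b=c',
+ SERVER_PROTOCOL='HTTP/1.1'.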
+ """ + environ = { + "REQUEST_METHOD": scope["method"], + "SCRIPT_NAME": "", + "PATH_INFO": scope["path"].encode("utf8").decode("latin1"), + "QUERY_STRING": scope["query_string"].decode("ascii"), + "SERVER_PROTOCOL": "HTTP/%s" % scope["http_version"], + "wsgi.version": (1, 0), + "wsgi.url_scheme": scope.get("scheme", "http"), + "wsgi.input": body, + "wsgi.errors": sys.stdout, + "wsgi.multithread": True, + "wsgi.multiprocess": True, + "wsgi.run_once": False, + } + + # Get server name and port - required in WSGI, not in ASGI + server = scope.get("server") + if server is None: + server = ("localhost", 80) + environ["SERVER_NAME"] = server[0] + environ["SERVER_PORT"] = server[1] + + # Get client IP address + client = scope.get("client") + if client is not None: + environ["REMOTE_ADDR"] = client[0] + + # Go through headers and make them into environ entries + for name, value in scope.get("headers", []): + name_str: str = name.decode("latin1") + if name_str == "content-length": + corrected_name = "CONTENT_LENGTH" + elif name_str == "content-type": + corrected_name = "CONTENT_TYPE" + else: + corrected_name = "HTTP_%s" % name_str.upper().replace("-", "_") + # HTTPbis say only ASCII chars are allowed in headers, but we latin1 + # just in case + value_str: str = value.decode("latin1") + if corrected_name in environ: + corrected_name_environ = environ[corrected_name] + assert isinstance(corrected_name_environ, str) + value_str = corrected_name_environ + "," + value_str + environ[corrected_name] = value_str + return environ + + +class WSGIMiddleware: + def __init__(self, app: WSGIApp, workers: int = 10): + self.app = app + self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=workers) + + async def __call__( + self, + scope: "HTTPScope", + receive: "ASGIReceiveCallable", + send: "ASGISendCallable", + ) -> None: + assert scope["type"] == "http" + instance = WSGIResponder(self.app, self.executor, scope) + await instance(receive, send) + + +class WSGIResponder: + def __init__( + self, + app: WSGIApp, + executor: concurrent.futures.ThreadPoolExecutor, + scope: "HTTPScope", + ): + self.app = app + self.executor = executor + self.scope = scope + self.status = None + self.response_headers = None + self.send_event = asyncio.Event() + self.send_queue: Deque[Optional["ASGISendEvent"]] = deque() + self.loop: asyncio.AbstractEventLoop = asyncio.get_event_loop() + self.response_started = False + self.exc_info: Optional[ExcInfo] = None + + async def __call__( + self, receive: "ASGIReceiveCallable", send: "ASGISendCallable" + ) -> None: + message: HTTPRequestEvent = await receive() # type: ignore[assignment] + body = io.BytesIO(message.get("body", b"")) + more_body = message.get("more_body", False) + if more_body: + body.seek(0, io.SEEK_END) + while more_body: + body_message: "HTTPRequestEvent" = ( + await receive() # type: ignore[assignment] + ) + body.write(body_message.get("body", b"")) + more_body = body_message.get("more_body", False) + body.seek(0) + environ = build_environ(self.scope, message, body) + self.loop = asyncio.get_event_loop() + wsgi = self.loop.run_in_executor( + self.executor, self.wsgi, environ, self.start_response + ) + sender = self.loop.create_task(self.sender(send)) + try: + await asyncio.wait_for(wsgi, None) + finally: + self.send_queue.append(None) + self.send_event.set() + await asyncio.wait_for(sender, None) + if self.exc_info is not None: + raise self.exc_info[0].with_traceback(self.exc_info[1], self.exc_info[2]) + + async def sender(self, send: "ASGISendCallable") -> None: 
+ while True: + if self.send_queue: + message = self.send_queue.popleft() + if message is None: + return + await send(message) + else: + await self.send_event.wait() + self.send_event.clear() + + def start_response( + self, + status: str, + response_headers: Iterable[Tuple[str, str]], + exc_info: Optional[ExcInfo] = None, + ) -> None: + self.exc_info = exc_info + if not self.response_started: + self.response_started = True + status_code_str, _ = status.split(" ", 1) + status_code = int(status_code_str) + headers = [ + (name.encode("ascii"), value.encode("ascii")) + for name, value in response_headers + ] + http_response_start_event: HTTPResponseStartEvent = { + "type": "http.response.start", + "status": status_code, + "headers": headers, + } + self.send_queue.append(http_response_start_event) + self.loop.call_soon_threadsafe(self.send_event.set) + + def wsgi(self, environ: Environ, start_response: StartResponse) -> None: + for chunk in self.app(environ, start_response): # type: ignore + response_body: HTTPResponseBodyEvent = { + "type": "http.response.body", + "body": chunk, + "more_body": True, + } + self.send_queue.append(response_body) + self.loop.call_soon_threadsafe(self.send_event.set) + + empty_body: HTTPResponseBodyEvent = { + "type": "http.response.body", + "body": b"", + "more_body": False, + } + self.send_queue.append(empty_body) + self.loop.call_soon_threadsafe(self.send_event.set) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/http/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/http/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/http/auto.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/http/auto.py new file mode 100644 index 00000000..1aa99674 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/http/auto.py @@ -0,0 +1,14 @@ +import asyncio +from typing import Type + +AutoHTTPProtocol: Type[asyncio.Protocol] +try: + import httptools # noqa +except ImportError: # pragma: no cover + from uvicorn.protocols.http.h11_impl import H11Protocol + + AutoHTTPProtocol = H11Protocol +else: # pragma: no cover + from uvicorn.protocols.http.httptools_impl import HttpToolsProtocol + + AutoHTTPProtocol = HttpToolsProtocol diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/http/flow_control.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/http/flow_control.py new file mode 100644 index 00000000..452c55f4 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/http/flow_control.py @@ -0,0 +1,68 @@ +import asyncio +import typing + +if typing.TYPE_CHECKING: + from asgiref.typing import ( + ASGIReceiveCallable, + ASGISendCallable, + HTTPResponseBodyEvent, + HTTPResponseStartEvent, + Scope, + ) + +CLOSE_HEADER = (b"connection", b"close") + +HIGH_WATER_LIMIT = 65536 + + +class FlowControl: + def __init__(self, transport: asyncio.Transport) -> None: + self._transport = transport + self.read_paused = False + self.write_paused = False + self._is_writable_event = asyncio.Event() + 
self._is_writable_event.set() + + async def drain(self) -> None: + await self._is_writable_event.wait() + + def pause_reading(self) -> None: + if not self.read_paused: + self.read_paused = True + self._transport.pause_reading() + + def resume_reading(self) -> None: + if self.read_paused: + self.read_paused = False + self._transport.resume_reading() + + def pause_writing(self) -> None: + if not self.write_paused: + self.write_paused = True + self._is_writable_event.clear() + + def resume_writing(self) -> None: + if self.write_paused: + self.write_paused = False + self._is_writable_event.set() + + +async def service_unavailable( + scope: "Scope", receive: "ASGIReceiveCallable", send: "ASGISendCallable" +) -> None: + response_start: "HTTPResponseStartEvent" = { + "type": "http.response.start", + "status": 503, + "headers": [ + (b"content-type", b"text/plain; charset=utf-8"), + (b"connection", b"close"), + ], + } + await send(response_start) + + response_body: "HTTPResponseBodyEvent" = { + "type": "http.response.body", + "body": b"Service Unavailable", + "more_body": False, + } + await send(response_body) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/http/h11_impl.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/http/h11_impl.py new file mode 100644 index 00000000..5fff70bb --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/http/h11_impl.py @@ -0,0 +1,557 @@ +import asyncio +import http +import logging +import sys +from typing import TYPE_CHECKING, Callable, List, Optional, Tuple, Union, cast +from urllib.parse import unquote + +import h11 + +from uvicorn.config import Config +from uvicorn.logging import TRACE_LOG_LEVEL +from uvicorn.protocols.http.flow_control import ( + CLOSE_HEADER, + HIGH_WATER_LIMIT, + FlowControl, + service_unavailable, +) +from uvicorn.protocols.utils import ( + get_client_addr, + get_local_addr, + get_path_with_query_string, + get_remote_addr, + is_ssl, +) +from uvicorn.server import ServerState + +if sys.version_info < (3, 8): # pragma: py-gte-38 + from typing_extensions import Literal +else: # pragma: py-lt-38 + from typing import Literal + +if TYPE_CHECKING: + from asgiref.typing import ( + ASGI3Application, + ASGIReceiveEvent, + ASGISendEvent, + HTTPDisconnectEvent, + HTTPRequestEvent, + HTTPResponseBodyEvent, + HTTPResponseStartEvent, + HTTPScope, + ) + +H11Event = Union[ + h11.Request, + h11.InformationalResponse, + h11.Response, + h11.Data, + h11.EndOfMessage, + h11.ConnectionClosed, +] + + +def _get_status_phrase(status_code: int) -> bytes: + try: + return http.HTTPStatus(status_code).phrase.encode() + except ValueError: + return b"" + + +STATUS_PHRASES = { + status_code: _get_status_phrase(status_code) for status_code in range(100, 600) +} + + +class H11Protocol(asyncio.Protocol): + def __init__( + self, + config: Config, + server_state: ServerState, + _loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + if not config.loaded: + config.load() + + self.config = config + self.app = config.loaded_app + self.loop = _loop or asyncio.get_event_loop() + self.logger = logging.getLogger("uvicorn.error") + self.access_logger = logging.getLogger("uvicorn.access") + self.access_log = self.access_logger.hasHandlers() + self.conn = h11.Connection(h11.SERVER, config.h11_max_incomplete_event_size) + self.ws_protocol_class = config.ws_protocol_class + self.root_path = config.root_path + self.limit_concurrency = config.limit_concurrency + + # 
Timeouts + self.timeout_keep_alive_task: Optional[asyncio.TimerHandle] = None + self.timeout_keep_alive = config.timeout_keep_alive + + # Shared server state + self.server_state = server_state + self.connections = server_state.connections + self.tasks = server_state.tasks + self.default_headers = server_state.default_headers + + # Per-connection state + self.transport: asyncio.Transport = None # type: ignore[assignment] + self.flow: FlowControl = None # type: ignore[assignment] + self.server: Optional[Tuple[str, int]] = None + self.client: Optional[Tuple[str, int]] = None + self.scheme: Optional[Literal["http", "https"]] = None + + # Per-request state + self.scope: HTTPScope = None # type: ignore[assignment] + self.headers: List[Tuple[bytes, bytes]] = None # type: ignore[assignment] + self.cycle: RequestResponseCycle = None # type: ignore[assignment] + + # Protocol interface + def connection_made( # type: ignore[override] + self, transport: asyncio.Transport + ) -> None: + self.connections.add(self) + + self.transport = transport + self.flow = FlowControl(transport) + self.server = get_local_addr(transport) + self.client = get_remote_addr(transport) + self.scheme = "https" if is_ssl(transport) else "http" + + if self.logger.level <= TRACE_LOG_LEVEL: + prefix = "%s:%d - " % self.client if self.client else "" + self.logger.log(TRACE_LOG_LEVEL, "%sHTTP connection made", prefix) + + def connection_lost(self, exc: Optional[Exception]) -> None: + self.connections.discard(self) + + if self.logger.level <= TRACE_LOG_LEVEL: + prefix = "%s:%d - " % self.client if self.client else "" + self.logger.log(TRACE_LOG_LEVEL, "%sHTTP connection lost", prefix) + + if self.cycle and not self.cycle.response_complete: + self.cycle.disconnected = True + if self.conn.our_state != h11.ERROR: + event = h11.ConnectionClosed() + try: + self.conn.send(event) + except h11.LocalProtocolError: + # Premature client disconnect + pass + + if self.cycle is not None: + self.cycle.message_event.set() + if self.flow is not None: + self.flow.resume_writing() + if exc is None: + self.transport.close() + self._unset_keepalive_if_required() + + def eof_received(self) -> None: + pass + + def _unset_keepalive_if_required(self) -> None: + if self.timeout_keep_alive_task is not None: + self.timeout_keep_alive_task.cancel() + self.timeout_keep_alive_task = None + + def data_received(self, data: bytes) -> None: + self._unset_keepalive_if_required() + + self.conn.receive_data(data) + self.handle_events() + + def handle_events(self) -> None: + while True: + try: + event = self.conn.next_event() + except h11.RemoteProtocolError: + msg = "Invalid HTTP request received." + self.logger.warning(msg) + self.send_400_response(msg) + return + event_type = type(event) + + if event_type is h11.NEED_DATA: + break + + elif event_type is h11.PAUSED: + # This case can occur in HTTP pipelining, so we need to + # stop reading any more data, and ensure that at the end + # of the active request/response cycle we handle any + # events that have been buffered up. 
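+ # Reading is resumed from on_response_complete(), which also replays
+ # any buffered events by calling handle_events() again.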
+ self.flow.pause_reading() + break + + elif event_type is h11.Request: + self.headers = [(key.lower(), value) for key, value in event.headers] + raw_path, _, query_string = event.target.partition(b"?") + self.scope = { # type: ignore[typeddict-item] + "type": "http", + "asgi": { + "version": self.config.asgi_version, + "spec_version": "2.3", + }, + "http_version": event.http_version.decode("ascii"), + "server": self.server, + "client": self.client, + "scheme": self.scheme, + "method": event.method.decode("ascii"), + "root_path": self.root_path, + "path": unquote(raw_path.decode("ascii")), + "raw_path": raw_path, + "query_string": query_string, + "headers": self.headers, + } + + for name, value in self.headers: + if name == b"connection": + tokens = [token.lower().strip() for token in value.split(b",")] + if b"upgrade" in tokens: + self.handle_upgrade(event) + return + + # Handle 503 responses when 'limit_concurrency' is exceeded. + if self.limit_concurrency is not None and ( + len(self.connections) >= self.limit_concurrency + or len(self.tasks) >= self.limit_concurrency + ): + app = service_unavailable + message = "Exceeded concurrency limit." + self.logger.warning(message) + else: + app = self.app + + self.cycle = RequestResponseCycle( + scope=self.scope, + conn=self.conn, + transport=self.transport, + flow=self.flow, + logger=self.logger, + access_logger=self.access_logger, + access_log=self.access_log, + default_headers=self.default_headers, + message_event=asyncio.Event(), + on_response=self.on_response_complete, + ) + task = self.loop.create_task(self.cycle.run_asgi(app)) + task.add_done_callback(self.tasks.discard) + self.tasks.add(task) + + elif event_type is h11.Data: + if self.conn.our_state is h11.DONE: + continue + self.cycle.body += event.data + if len(self.cycle.body) > HIGH_WATER_LIMIT: + self.flow.pause_reading() + self.cycle.message_event.set() + + elif event_type is h11.EndOfMessage: + if self.conn.our_state is h11.DONE: + self.transport.resume_reading() + self.conn.start_next_cycle() + continue + self.cycle.more_body = False + self.cycle.message_event.set() + + def handle_upgrade(self, event: H11Event) -> None: + upgrade_value = None + for name, value in self.headers: + if name == b"upgrade": + upgrade_value = value.lower() + + if upgrade_value != b"websocket" or self.ws_protocol_class is None: + msg = "Unsupported upgrade request." + self.logger.warning(msg) + from uvicorn.protocols.websockets.auto import AutoWebSocketsProtocol + + if AutoWebSocketsProtocol is None: # pragma: no cover + msg = "No supported WebSocket library detected. Please use 'pip install uvicorn[standard]', or install 'websockets' or 'wsproto' manually." 
# noqa: E501 + self.logger.warning(msg) + self.send_400_response(msg) + return + + if self.logger.level <= TRACE_LOG_LEVEL: + prefix = "%s:%d - " % self.client if self.client else "" + self.logger.log(TRACE_LOG_LEVEL, "%sUpgrading to WebSocket", prefix) + + self.connections.discard(self) + output = [event.method, b" ", event.target, b" HTTP/1.1\r\n"] + for name, value in self.headers: + output += [name, b": ", value, b"\r\n"] + output.append(b"\r\n") + protocol = self.ws_protocol_class( # type: ignore[call-arg] + config=self.config, server_state=self.server_state + ) + protocol.connection_made(self.transport) + protocol.data_received(b"".join(output)) + self.transport.set_protocol(protocol) + + def send_400_response(self, msg: str) -> None: + + reason = STATUS_PHRASES[400] + headers = [ + (b"content-type", b"text/plain; charset=utf-8"), + (b"connection", b"close"), + ] + event = h11.Response(status_code=400, headers=headers, reason=reason) + output = self.conn.send(event) + self.transport.write(output) + event = h11.Data(data=msg.encode("ascii")) + output = self.conn.send(event) + self.transport.write(output) + event = h11.EndOfMessage() + output = self.conn.send(event) + self.transport.write(output) + self.transport.close() + + def on_response_complete(self) -> None: + self.server_state.total_requests += 1 + + if self.transport.is_closing(): + return + + # Set a short Keep-Alive timeout. + self._unset_keepalive_if_required() + + self.timeout_keep_alive_task = self.loop.call_later( + self.timeout_keep_alive, self.timeout_keep_alive_handler + ) + + # Unpause data reads if needed. + self.flow.resume_reading() + + # Unblock any pipelined events. + if self.conn.our_state is h11.DONE and self.conn.their_state is h11.DONE: + self.conn.start_next_cycle() + self.handle_events() + + def shutdown(self) -> None: + """ + Called by the server to commence a graceful shutdown. + """ + if self.cycle is None or self.cycle.response_complete: + event = h11.ConnectionClosed() + self.conn.send(event) + self.transport.close() + else: + self.cycle.keep_alive = False + + def pause_writing(self) -> None: + """ + Called by the transport when the write buffer exceeds the high water mark. + """ + self.flow.pause_writing() + + def resume_writing(self) -> None: + """ + Called by the transport when the write buffer drops below the low water mark. + """ + self.flow.resume_writing() + + def timeout_keep_alive_handler(self) -> None: + """ + Called on a keep-alive connection if no new data is received after a short + delay. 
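+ The connection is closed cleanly: h11 is fed a ConnectionClosed event
+ before the transport is closed.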
+ """ + if not self.transport.is_closing(): + event = h11.ConnectionClosed() + self.conn.send(event) + self.transport.close() + + +class RequestResponseCycle: + def __init__( + self, + scope: "HTTPScope", + conn: h11.Connection, + transport: asyncio.Transport, + flow: FlowControl, + logger: logging.Logger, + access_logger: logging.Logger, + access_log: bool, + default_headers: List[Tuple[bytes, bytes]], + message_event: asyncio.Event, + on_response: Callable[..., None], + ) -> None: + self.scope = scope + self.conn = conn + self.transport = transport + self.flow = flow + self.logger = logger + self.access_logger = access_logger + self.access_log = access_log + self.default_headers = default_headers + self.message_event = message_event + self.on_response = on_response + + # Connection state + self.disconnected = False + self.keep_alive = True + self.waiting_for_100_continue = conn.they_are_waiting_for_100_continue + + # Request state + self.body = b"" + self.more_body = True + + # Response state + self.response_started = False + self.response_complete = False + + # ASGI exception wrapper + async def run_asgi(self, app: "ASGI3Application") -> None: + try: + result = await app( # type: ignore[func-returns-value] + self.scope, self.receive, self.send + ) + except BaseException as exc: + msg = "Exception in ASGI application\n" + self.logger.error(msg, exc_info=exc) + if not self.response_started: + await self.send_500_response() + else: + self.transport.close() + else: + if result is not None: + msg = "ASGI callable should return None, but returned '%s'." + self.logger.error(msg, result) + self.transport.close() + elif not self.response_started and not self.disconnected: + msg = "ASGI callable returned without starting response." + self.logger.error(msg) + await self.send_500_response() + elif not self.response_complete and not self.disconnected: + msg = "ASGI callable returned without completing response." + self.logger.error(msg) + self.transport.close() + finally: + self.on_response = lambda: None + + async def send_500_response(self) -> None: + response_start_event: "HTTPResponseStartEvent" = { + "type": "http.response.start", + "status": 500, + "headers": [ + (b"content-type", b"text/plain; charset=utf-8"), + (b"connection", b"close"), + ], + } + await self.send(response_start_event) + response_body_event: "HTTPResponseBodyEvent" = { + "type": "http.response.body", + "body": b"Internal Server Error", + "more_body": False, + } + await self.send(response_body_event) + + # ASGI interface + async def send(self, message: "ASGISendEvent") -> None: + message_type = message["type"] + + if self.flow.write_paused and not self.disconnected: + await self.flow.drain() + + if self.disconnected: + return + + if not self.response_started: + # Sending response status line and headers + if message_type != "http.response.start": + msg = "Expected ASGI message 'http.response.start', but got '%s'." 
+ raise RuntimeError(msg % message_type) + message = cast("HTTPResponseStartEvent", message) + + self.response_started = True + self.waiting_for_100_continue = False + + status_code = message["status"] + message_headers = cast( + List[Tuple[bytes, bytes]], message.get("headers", []) + ) + headers = self.default_headers + message_headers + + if CLOSE_HEADER in self.scope["headers"] and CLOSE_HEADER not in headers: + headers = headers + [CLOSE_HEADER] + + if self.access_log: + self.access_logger.info( + '%s - "%s %s HTTP/%s" %d', + get_client_addr(self.scope), + self.scope["method"], + get_path_with_query_string(self.scope), + self.scope["http_version"], + status_code, + ) + + # Write response status line and headers + reason = STATUS_PHRASES[status_code] + event = h11.Response( + status_code=status_code, headers=headers, reason=reason + ) + output = self.conn.send(event) + self.transport.write(output) + + elif not self.response_complete: + # Sending response body + if message_type != "http.response.body": + msg = "Expected ASGI message 'http.response.body', but got '%s'." + raise RuntimeError(msg % message_type) + message = cast("HTTPResponseBodyEvent", message) + + body = message.get("body", b"") + more_body = message.get("more_body", False) + + # Write response body + if self.scope["method"] == "HEAD": + event = h11.Data(data=b"") + else: + event = h11.Data(data=body) + output = self.conn.send(event) + self.transport.write(output) + + # Handle response completion + if not more_body: + self.response_complete = True + self.message_event.set() + event = h11.EndOfMessage() + output = self.conn.send(event) + self.transport.write(output) + + else: + # Response already sent + msg = "Unexpected ASGI message '%s' sent, after response already completed." + raise RuntimeError(msg % message_type) + + if self.response_complete: + if self.conn.our_state is h11.MUST_CLOSE or not self.keep_alive: + event = h11.ConnectionClosed() + self.conn.send(event) + self.transport.close() + self.on_response() + + async def receive(self) -> "ASGIReceiveEvent": + if self.waiting_for_100_continue and not self.transport.is_closing(): + event = h11.InformationalResponse( + status_code=100, headers=[], reason="Continue" + ) + output = self.conn.send(event) + self.transport.write(output) + self.waiting_for_100_continue = False + + if not self.disconnected and not self.response_complete: + self.flow.resume_reading() + await self.message_event.wait() + self.message_event.clear() + + message: "Union[HTTPDisconnectEvent, HTTPRequestEvent]" + if self.disconnected or self.response_complete: + message = {"type": "http.disconnect"} + else: + message = { + "type": "http.request", + "body": self.body, + "more_body": self.more_body, + } + self.body = b"" + + return message diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/http/httptools_impl.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/http/httptools_impl.py new file mode 100644 index 00000000..f018c59a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/http/httptools_impl.py @@ -0,0 +1,579 @@ +import asyncio +import http +import logging +import re +import sys +import urllib +from asyncio.events import TimerHandle +from collections import deque +from typing import TYPE_CHECKING, Callable, Deque, List, Optional, Tuple, Union, cast + +import httptools + +from uvicorn.config import Config +from uvicorn.logging import TRACE_LOG_LEVEL +from 
uvicorn.protocols.http.flow_control import ( + CLOSE_HEADER, + HIGH_WATER_LIMIT, + FlowControl, + service_unavailable, +) +from uvicorn.protocols.utils import ( + get_client_addr, + get_local_addr, + get_path_with_query_string, + get_remote_addr, + is_ssl, +) +from uvicorn.server import ServerState + +if sys.version_info < (3, 8): # pragma: py-gte-38 + from typing_extensions import Literal +else: # pragma: py-lt-38 + from typing import Literal + +if TYPE_CHECKING: + from asgiref.typing import ( + ASGI3Application, + ASGIReceiveEvent, + ASGISendEvent, + HTTPDisconnectEvent, + HTTPRequestEvent, + HTTPResponseBodyEvent, + HTTPResponseStartEvent, + HTTPScope, + ) + +HEADER_RE = re.compile(b'[\x00-\x1F\x7F()<>@,;:[\\]={} \t\\\\"]') +HEADER_VALUE_RE = re.compile(b"[\x00-\x1F\x7F]") + + +def _get_status_line(status_code: int) -> bytes: + try: + phrase = http.HTTPStatus(status_code).phrase.encode() + except ValueError: + phrase = b"" + return b"".join([b"HTTP/1.1 ", str(status_code).encode(), b" ", phrase, b"\r\n"]) + + +STATUS_LINE = { + status_code: _get_status_line(status_code) for status_code in range(100, 600) +} + + +class HttpToolsProtocol(asyncio.Protocol): + def __init__( + self, + config: Config, + server_state: ServerState, + _loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + if not config.loaded: + config.load() + + self.config = config + self.app = config.loaded_app + self.loop = _loop or asyncio.get_event_loop() + self.logger = logging.getLogger("uvicorn.error") + self.access_logger = logging.getLogger("uvicorn.access") + self.access_log = self.access_logger.hasHandlers() + self.parser = httptools.HttpRequestParser(self) + self.ws_protocol_class = config.ws_protocol_class + self.root_path = config.root_path + self.limit_concurrency = config.limit_concurrency + + # Timeouts + self.timeout_keep_alive_task: Optional[TimerHandle] = None + self.timeout_keep_alive = config.timeout_keep_alive + + # Global state + self.server_state = server_state + self.connections = server_state.connections + self.tasks = server_state.tasks + self.default_headers = server_state.default_headers + + # Per-connection state + self.transport: asyncio.Transport = None # type: ignore[assignment] + self.flow: FlowControl = None # type: ignore[assignment] + self.server: Optional[Tuple[str, int]] = None + self.client: Optional[Tuple[str, int]] = None + self.scheme: Optional[Literal["http", "https"]] = None + self.pipeline: Deque[Tuple[RequestResponseCycle, ASGI3Application]] = deque() + + # Per-request state + self.scope: HTTPScope = None # type: ignore[assignment] + self.headers: List[Tuple[bytes, bytes]] = None # type: ignore[assignment] + self.expect_100_continue = False + self.cycle: RequestResponseCycle = None # type: ignore[assignment] + + # Protocol interface + def connection_made( # type: ignore[override] + self, transport: asyncio.Transport + ) -> None: + self.connections.add(self) + + self.transport = transport + self.flow = FlowControl(transport) + self.server = get_local_addr(transport) + self.client = get_remote_addr(transport) + self.scheme = "https" if is_ssl(transport) else "http" + + if self.logger.level <= TRACE_LOG_LEVEL: + prefix = "%s:%d - " % self.client if self.client else "" + self.logger.log(TRACE_LOG_LEVEL, "%sHTTP connection made", prefix) + + def connection_lost(self, exc: Optional[Exception]) -> None: + self.connections.discard(self) + + if self.logger.level <= TRACE_LOG_LEVEL: + prefix = "%s:%d - " % self.client if self.client else "" + self.logger.log(TRACE_LOG_LEVEL, "%sHTTP
connection lost", prefix) + + if self.cycle and not self.cycle.response_complete: + self.cycle.disconnected = True + if self.cycle is not None: + self.cycle.message_event.set() + if self.flow is not None: + self.flow.resume_writing() + if exc is None: + self.transport.close() + self._unset_keepalive_if_required() + + self.parser = None + + def eof_received(self) -> None: + pass + + def _unset_keepalive_if_required(self) -> None: + if self.timeout_keep_alive_task is not None: + self.timeout_keep_alive_task.cancel() + self.timeout_keep_alive_task = None + + def data_received(self, data: bytes) -> None: + self._unset_keepalive_if_required() + + try: + self.parser.feed_data(data) + except httptools.HttpParserError: + msg = "Invalid HTTP request received." + self.logger.warning(msg) + self.send_400_response(msg) + return + except httptools.HttpParserUpgrade: + self.handle_upgrade() + + def handle_upgrade(self) -> None: + upgrade_value = None + for name, value in self.headers: + if name == b"upgrade": + upgrade_value = value.lower() + + if upgrade_value != b"websocket" or self.ws_protocol_class is None: + msg = "Unsupported upgrade request." + self.logger.warning(msg) + from uvicorn.protocols.websockets.auto import AutoWebSocketsProtocol + + if AutoWebSocketsProtocol is None: # pragma: no cover + msg = "No supported WebSocket library detected. Please use 'pip install uvicorn[standard]', or install 'websockets' or 'wsproto' manually." # noqa: E501 + self.logger.warning(msg) + self.send_400_response(msg) + return + + if self.logger.level <= TRACE_LOG_LEVEL: + prefix = "%s:%d - " % self.client if self.client else "" + self.logger.log(TRACE_LOG_LEVEL, "%sUpgrading to WebSocket", prefix) + + self.connections.discard(self) + method = self.scope["method"].encode() + output = [method, b" ", self.url, b" HTTP/1.1\r\n"] + for name, value in self.scope["headers"]: + output += [name, b": ", value, b"\r\n"] + output.append(b"\r\n") + protocol = self.ws_protocol_class( # type: ignore[call-arg] + config=self.config, server_state=self.server_state + ) + protocol.connection_made(self.transport) + protocol.data_received(b"".join(output)) + self.transport.set_protocol(protocol) + + def send_400_response(self, msg: str) -> None: + + content = [STATUS_LINE[400]] + for name, value in self.default_headers: + content.extend([name, b": ", value, b"\r\n"]) + content.extend( + [ + b"content-type: text/plain; charset=utf-8\r\n", + b"content-length: " + str(len(msg)).encode("ascii") + b"\r\n", + b"connection: close\r\n", + b"\r\n", + msg.encode("ascii"), + ] + ) + self.transport.write(b"".join(content)) + self.transport.close() + + def on_message_begin(self) -> None: + self.url = b"" + self.expect_100_continue = False + self.headers = [] + self.scope = { # type: ignore[typeddict-item] + "type": "http", + "asgi": {"version": self.config.asgi_version, "spec_version": "2.3"}, + "http_version": "1.1", + "server": self.server, + "client": self.client, + "scheme": self.scheme, + "root_path": self.root_path, + "headers": self.headers, + } + + # Parser callbacks + def on_url(self, url: bytes) -> None: + self.url += url + + def on_header(self, name: bytes, value: bytes) -> None: + name = name.lower() + if name == b"expect" and value.lower() == b"100-continue": + self.expect_100_continue = True + self.headers.append((name, value)) + + def on_headers_complete(self) -> None: + http_version = self.parser.get_http_version() + method = self.parser.get_method() + self.scope["method"] = method.decode("ascii") + if http_version != "1.1": + 
self.scope["http_version"] = http_version + if self.parser.should_upgrade(): + return + parsed_url = httptools.parse_url(self.url) + raw_path = parsed_url.path + path = raw_path.decode("ascii") + if "%" in path: + path = urllib.parse.unquote(path) + self.scope["path"] = path + self.scope["raw_path"] = raw_path + self.scope["query_string"] = parsed_url.query or b"" + + # Handle 503 responses when 'limit_concurrency' is exceeded. + if self.limit_concurrency is not None and ( + len(self.connections) >= self.limit_concurrency + or len(self.tasks) >= self.limit_concurrency + ): + app = service_unavailable + message = "Exceeded concurrency limit." + self.logger.warning(message) + else: + app = self.app + + existing_cycle = self.cycle + self.cycle = RequestResponseCycle( + scope=self.scope, + transport=self.transport, + flow=self.flow, + logger=self.logger, + access_logger=self.access_logger, + access_log=self.access_log, + default_headers=self.default_headers, + message_event=asyncio.Event(), + expect_100_continue=self.expect_100_continue, + keep_alive=http_version != "1.0", + on_response=self.on_response_complete, + ) + if existing_cycle is None or existing_cycle.response_complete: + # Standard case - start processing the request. + task = self.loop.create_task(self.cycle.run_asgi(app)) + task.add_done_callback(self.tasks.discard) + self.tasks.add(task) + else: + # Pipelined HTTP requests need to be queued up. + self.flow.pause_reading() + self.pipeline.appendleft((self.cycle, app)) + + def on_body(self, body: bytes) -> None: + if self.parser.should_upgrade() or self.cycle.response_complete: + return + self.cycle.body += body + if len(self.cycle.body) > HIGH_WATER_LIMIT: + self.flow.pause_reading() + self.cycle.message_event.set() + + def on_message_complete(self) -> None: + if self.parser.should_upgrade() or self.cycle.response_complete: + return + self.cycle.more_body = False + self.cycle.message_event.set() + + def on_response_complete(self) -> None: + # Callback for pipelined HTTP requests to be started. + self.server_state.total_requests += 1 + + if self.transport.is_closing(): + return + + # Set a short Keep-Alive timeout. + self._unset_keepalive_if_required() + + self.timeout_keep_alive_task = self.loop.call_later( + self.timeout_keep_alive, self.timeout_keep_alive_handler + ) + + # Unpause data reads if needed. + self.flow.resume_reading() + + # Unblock any pipelined events. + if self.pipeline: + cycle, app = self.pipeline.pop() + task = self.loop.create_task(cycle.run_asgi(app)) + task.add_done_callback(self.tasks.discard) + self.tasks.add(task) + + def shutdown(self) -> None: + """ + Called by the server to commence a graceful shutdown. + """ + if self.cycle is None or self.cycle.response_complete: + self.transport.close() + else: + self.cycle.keep_alive = False + + def pause_writing(self) -> None: + """ + Called by the transport when the write buffer exceeds the high water mark. + """ + self.flow.pause_writing() + + def resume_writing(self) -> None: + """ + Called by the transport when the write buffer drops below the low water mark. + """ + self.flow.resume_writing() + + def timeout_keep_alive_handler(self) -> None: + """ + Called on a keep-alive connection if no new data is received after a short + delay. 
+ """ + if not self.transport.is_closing(): + self.transport.close() + + +class RequestResponseCycle: + def __init__( + self, + scope: "HTTPScope", + transport: asyncio.Transport, + flow: FlowControl, + logger: logging.Logger, + access_logger: logging.Logger, + access_log: bool, + default_headers: List[Tuple[bytes, bytes]], + message_event: asyncio.Event, + expect_100_continue: bool, + keep_alive: bool, + on_response: Callable[..., None], + ): + self.scope = scope + self.transport = transport + self.flow = flow + self.logger = logger + self.access_logger = access_logger + self.access_log = access_log + self.default_headers = default_headers + self.message_event = message_event + self.on_response = on_response + + # Connection state + self.disconnected = False + self.keep_alive = keep_alive + self.waiting_for_100_continue = expect_100_continue + + # Request state + self.body = b"" + self.more_body = True + + # Response state + self.response_started = False + self.response_complete = False + self.chunked_encoding: Optional[bool] = None + self.expected_content_length = 0 + + # ASGI exception wrapper + async def run_asgi(self, app: "ASGI3Application") -> None: + try: + result = await app( # type: ignore[func-returns-value] + self.scope, self.receive, self.send + ) + except BaseException as exc: + msg = "Exception in ASGI application\n" + self.logger.error(msg, exc_info=exc) + if not self.response_started: + await self.send_500_response() + else: + self.transport.close() + else: + if result is not None: + msg = "ASGI callable should return None, but returned '%s'." + self.logger.error(msg, result) + self.transport.close() + elif not self.response_started and not self.disconnected: + msg = "ASGI callable returned without starting response." + self.logger.error(msg) + await self.send_500_response() + elif not self.response_complete and not self.disconnected: + msg = "ASGI callable returned without completing response." + self.logger.error(msg) + self.transport.close() + finally: + self.on_response = lambda: None + + async def send_500_response(self) -> None: + response_start_event: "HTTPResponseStartEvent" = { + "type": "http.response.start", + "status": 500, + "headers": [ + (b"content-type", b"text/plain; charset=utf-8"), + (b"connection", b"close"), + ], + } + await self.send(response_start_event) + response_body_event: "HTTPResponseBodyEvent" = { + "type": "http.response.body", + "body": b"Internal Server Error", + "more_body": False, + } + await self.send(response_body_event) + + # ASGI interface + async def send(self, message: "ASGISendEvent") -> None: + message_type = message["type"] + + if self.flow.write_paused and not self.disconnected: + await self.flow.drain() + + if self.disconnected: + return + + if not self.response_started: + # Sending response status line and headers + if message_type != "http.response.start": + msg = "Expected ASGI message 'http.response.start', but got '%s'." 
+ raise RuntimeError(msg % message_type) + message = cast("HTTPResponseStartEvent", message) + + self.response_started = True + self.waiting_for_100_continue = False + + status_code = message["status"] + headers = self.default_headers + list(message.get("headers", [])) + + if CLOSE_HEADER in self.scope["headers"] and CLOSE_HEADER not in headers: + headers = headers + [CLOSE_HEADER] + + if self.access_log: + self.access_logger.info( + '%s - "%s %s HTTP/%s" %d', + get_client_addr(self.scope), + self.scope["method"], + get_path_with_query_string(self.scope), + self.scope["http_version"], + status_code, + ) + + # Write response status line and headers + content = [STATUS_LINE[status_code]] + + for name, value in headers: + if HEADER_RE.search(name): + raise RuntimeError("Invalid HTTP header name.") + if HEADER_VALUE_RE.search(value): + raise RuntimeError("Invalid HTTP header value.") + + name = name.lower() + if name == b"content-length" and self.chunked_encoding is None: + self.expected_content_length = int(value.decode()) + self.chunked_encoding = False + elif name == b"transfer-encoding" and value.lower() == b"chunked": + self.expected_content_length = 0 + self.chunked_encoding = True + elif name == b"connection" and value.lower() == b"close": + self.keep_alive = False + content.extend([name, b": ", value, b"\r\n"]) + + if ( + self.chunked_encoding is None + and self.scope["method"] != "HEAD" + and status_code not in (204, 304) + ): + # Neither content-length nor transfer-encoding specified + self.chunked_encoding = True + content.append(b"transfer-encoding: chunked\r\n") + + content.append(b"\r\n") + self.transport.write(b"".join(content)) + + elif not self.response_complete: + # Sending response body + if message_type != "http.response.body": + msg = "Expected ASGI message 'http.response.body', but got '%s'." + raise RuntimeError(msg % message_type) + + body = cast(bytes, message.get("body", b"")) + more_body = message.get("more_body", False) + + # Write response body + if self.scope["method"] == "HEAD": + self.expected_content_length = 0 + elif self.chunked_encoding: + if body: + content = [b"%x\r\n" % len(body), body, b"\r\n"] + else: + content = [] + if not more_body: + content.append(b"0\r\n\r\n") + self.transport.write(b"".join(content)) + else: + num_bytes = len(body) + if num_bytes > self.expected_content_length: + raise RuntimeError("Response content longer than Content-Length") + else: + self.expected_content_length -= num_bytes + self.transport.write(body) + + # Handle response completion + if not more_body: + if self.expected_content_length != 0: + raise RuntimeError("Response content shorter than Content-Length") + self.response_complete = True + self.message_event.set() + if not self.keep_alive: + self.transport.close() + self.on_response() + + else: + # Response already sent + msg = "Unexpected ASGI message '%s' sent, after response already completed." 
+ raise RuntimeError(msg % message_type) + + async def receive(self) -> "ASGIReceiveEvent": + if self.waiting_for_100_continue and not self.transport.is_closing(): + self.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n") + self.waiting_for_100_continue = False + + if not self.disconnected and not self.response_complete: + self.flow.resume_reading() + await self.message_event.wait() + self.message_event.clear() + + message: "Union[HTTPDisconnectEvent, HTTPRequestEvent]" + if self.disconnected or self.response_complete: + message = {"type": "http.disconnect"} + else: + message = { + "type": "http.request", + "body": self.body, + "more_body": self.more_body, + } + self.body = b"" + + return message diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/utils.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/utils.py new file mode 100644 index 00000000..fbd4b4d5 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/utils.py @@ -0,0 +1,55 @@ +import asyncio +import urllib.parse +from typing import TYPE_CHECKING, Optional, Tuple + +if TYPE_CHECKING: + from asgiref.typing import WWWScope + + +def get_remote_addr(transport: asyncio.Transport) -> Optional[Tuple[str, int]]: + socket_info = transport.get_extra_info("socket") + if socket_info is not None: + try: + info = socket_info.getpeername() + return (str(info[0]), int(info[1])) if isinstance(info, tuple) else None + except OSError: # pragma: no cover + # This case appears to inconsistently occur with uvloop + # bound to a unix domain socket. + return None + + info = transport.get_extra_info("peername") + if info is not None and isinstance(info, (list, tuple)) and len(info) == 2: + return (str(info[0]), int(info[1])) + return None + + +def get_local_addr(transport: asyncio.Transport) -> Optional[Tuple[str, int]]: + socket_info = transport.get_extra_info("socket") + if socket_info is not None: + info = socket_info.getsockname() + + return (str(info[0]), int(info[1])) if isinstance(info, tuple) else None + info = transport.get_extra_info("sockname") + if info is not None and isinstance(info, (list, tuple)) and len(info) == 2: + return (str(info[0]), int(info[1])) + return None + + +def is_ssl(transport: asyncio.Transport) -> bool: + return bool(transport.get_extra_info("sslcontext")) + + +def get_client_addr(scope: "WWWScope") -> str: + client = scope.get("client") + if not client: + return "" + return "%s:%d" % client + + +def get_path_with_query_string(scope: "WWWScope") -> str: + path_with_query_string = urllib.parse.quote(scope["path"]) + if scope["query_string"]: + path_with_query_string = "{}?{}".format( + path_with_query_string, scope["query_string"].decode("ascii") + ) + return path_with_query_string diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/websockets/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/websockets/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/websockets/auto.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/websockets/auto.py new file mode 100644 index 00000000..0dfba3bd --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/websockets/auto.py @@ -0,0 +1,19 @@ +import asyncio +import typing + +AutoWebSocketsProtocol: typing.Optional[typing.Type[asyncio.Protocol]] 
+try: + import websockets # noqa +except ImportError: # pragma: no cover + try: + import wsproto # noqa + except ImportError: + AutoWebSocketsProtocol = None + else: + from uvicorn.protocols.websockets.wsproto_impl import WSProtocol + + AutoWebSocketsProtocol = WSProtocol +else: + from uvicorn.protocols.websockets.websockets_impl import WebSocketProtocol + + AutoWebSocketsProtocol = WebSocketProtocol diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/websockets/websockets_impl.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/websockets/websockets_impl.py new file mode 100644 index 00000000..eea7a65b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/websockets/websockets_impl.py @@ -0,0 +1,350 @@ +import asyncio +import http +import logging +import sys +from typing import TYPE_CHECKING, Any, List, Optional, Sequence, Tuple, Union, cast +from urllib.parse import unquote + +import websockets +from websockets.datastructures import Headers +from websockets.exceptions import ConnectionClosed +from websockets.extensions.permessage_deflate import ServerPerMessageDeflateFactory +from websockets.legacy.server import HTTPResponse +from websockets.server import WebSocketServerProtocol +from websockets.typing import Subprotocol + +from uvicorn.config import Config +from uvicorn.logging import TRACE_LOG_LEVEL +from uvicorn.protocols.utils import get_local_addr, get_remote_addr, is_ssl +from uvicorn.server import ServerState + +if sys.version_info < (3, 8): + from typing_extensions import Literal +else: + from typing import Literal + +if TYPE_CHECKING: + from asgiref.typing import ( + ASGISendEvent, + WebSocketAcceptEvent, + WebSocketCloseEvent, + WebSocketConnectEvent, + WebSocketDisconnectEvent, + WebSocketReceiveEvent, + WebSocketScope, + WebSocketSendEvent, + ) + + +class Server: + closing = False + + def register(self, ws: WebSocketServerProtocol) -> None: + pass + + def unregister(self, ws: WebSocketServerProtocol) -> None: + pass + + def is_serving(self) -> bool: + return not self.closing + + +class WebSocketProtocol(WebSocketServerProtocol): + extra_headers: List[Tuple[str, str]] + + def __init__( + self, + config: Config, + server_state: ServerState, + _loop: Optional[asyncio.AbstractEventLoop] = None, + ): + if not config.loaded: + config.load() + + self.config = config + self.app = config.loaded_app + self.loop = _loop or asyncio.get_event_loop() + self.root_path = config.root_path + + # Shared server state + self.connections = server_state.connections + self.tasks = server_state.tasks + + # Connection state + self.transport: asyncio.Transport = None # type: ignore[assignment] + self.server: Optional[Tuple[str, int]] = None + self.client: Optional[Tuple[str, int]] = None + self.scheme: Literal["wss", "ws"] = None # type: ignore[assignment] + + # Connection events + self.scope: WebSocketScope = None # type: ignore[assignment] + self.handshake_started_event = asyncio.Event() + self.handshake_completed_event = asyncio.Event() + self.closed_event = asyncio.Event() + self.initial_response: Optional[HTTPResponse] = None + self.connect_sent = False + self.accepted_subprotocol: Optional[Subprotocol] = None + self.transfer_data_task: asyncio.Task = None # type: ignore[assignment] + + self.ws_server: Server = Server() # type: ignore[assignment] + + extensions = [] + if self.config.ws_per_message_deflate: + extensions.append(ServerPerMessageDeflateFactory()) + + super().__init__( + 
ws_handler=self.ws_handler, + ws_server=self.ws_server, # type: ignore[arg-type] + max_size=self.config.ws_max_size, + ping_interval=self.config.ws_ping_interval, + ping_timeout=self.config.ws_ping_timeout, + extensions=extensions, + logger=logging.getLogger("uvicorn.error"), + extra_headers=[], + ) + + def connection_made( # type: ignore[override] + self, transport: asyncio.Transport + ) -> None: + self.connections.add(self) + self.transport = transport + self.server = get_local_addr(transport) + self.client = get_remote_addr(transport) + self.scheme = "wss" if is_ssl(transport) else "ws" + + if self.logger.isEnabledFor(TRACE_LOG_LEVEL): + prefix = "%s:%d - " % self.client if self.client else "" + self.logger.log(TRACE_LOG_LEVEL, "%sWebSocket connection made", prefix) + + super().connection_made(transport) + + def connection_lost(self, exc: Optional[Exception]) -> None: + self.connections.remove(self) + + if self.logger.isEnabledFor(TRACE_LOG_LEVEL): + prefix = "%s:%d - " % self.client if self.client else "" + self.logger.log(TRACE_LOG_LEVEL, "%sWebSocket connection lost", prefix) + + self.handshake_completed_event.set() + super().connection_lost(exc) + if exc is None: + self.transport.close() + + def shutdown(self) -> None: + self.ws_server.closing = True + self.transport.close() + + def on_task_complete(self, task: asyncio.Task) -> None: + self.tasks.discard(task) + + async def process_request( + self, path: str, headers: Headers + ) -> Optional[HTTPResponse]: + """ + This hook is called to determine if the websocket should return + an HTTP response and close. + + Our behavior here is to start the ASGI application, and then wait + for either `accept` or `close` in order to determine if we should + close the connection. + """ + path_portion, _, query_string = path.partition("?") + + websockets.legacy.handshake.check_request(headers) + + subprotocols = [] + for header in headers.get_all("Sec-WebSocket-Protocol"): + subprotocols.extend([token.strip() for token in header.split(",")]) + + asgi_headers = [ + (name.encode("ascii"), value.encode("ascii")) + for name, value in headers.raw_items() + ] + + self.scope = { # type: ignore[typeddict-item] + "type": "websocket", + "asgi": {"version": self.config.asgi_version, "spec_version": "2.3"}, + "http_version": "1.1", + "scheme": self.scheme, + "server": self.server, + "client": self.client, + "root_path": self.root_path, + "path": unquote(path_portion), + "raw_path": path_portion.encode("ascii"), + "query_string": query_string.encode("ascii"), + "headers": asgi_headers, + "subprotocols": subprotocols, + } + task = self.loop.create_task(self.run_asgi()) + task.add_done_callback(self.on_task_complete) + self.tasks.add(task) + await self.handshake_started_event.wait() + return self.initial_response + + def process_subprotocol( + self, headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]] + ) -> Optional[Subprotocol]: + """ + We override the standard 'process_subprotocol' behavior here so that + we return whatever subprotocol is sent in the 'accept' message. 
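For the application side of this negotiation: an ASGI app reads the offered values from scope["subprotocols"] and echoes its choice back in the accept event. A hedged sketch ("graphql-ws" is just an example value):

async def ws_app(scope, receive, send):
    assert scope["type"] == "websocket"
    await receive()  # expect {"type": "websocket.connect"}
    offered = scope.get("subprotocols", [])
    chosen = "graphql-ws" if "graphql-ws" in offered else None
    await send({"type": "websocket.accept", "subprotocol": chosen})
    await send({"type": "websocket.send", "text": "hello"})
    await send({"type": "websocket.close", "code": 1000})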
+ """ + return self.accepted_subprotocol + + def send_500_response(self) -> None: + msg = b"Internal Server Error" + content = [ + b"HTTP/1.1 500 Internal Server Error\r\n" + b"content-type: text/plain; charset=utf-8\r\n", + b"content-length: " + str(len(msg)).encode("ascii") + b"\r\n", + b"connection: close\r\n", + b"\r\n", + msg, + ] + self.transport.write(b"".join(content)) + # Allow handler task to terminate cleanly, as websockets doesn't cancel it by + # itself (see https://github.com/encode/uvicorn/issues/920) + self.handshake_started_event.set() + + async def ws_handler( # type: ignore[override] + self, protocol: WebSocketServerProtocol, path: str + ) -> Any: + """ + This is the main handler function for the 'websockets' implementation + to call into. We just wait for close then return, and instead allow + 'send' and 'receive' events to drive the flow. + """ + self.handshake_completed_event.set() + await self.closed_event.wait() + + async def run_asgi(self) -> None: + """ + Wrapper around the ASGI callable, handling exceptions and unexpected + termination states. + """ + try: + result = await self.app(self.scope, self.asgi_receive, self.asgi_send) + except BaseException as exc: + self.closed_event.set() + msg = "Exception in ASGI application\n" + self.logger.error(msg, exc_info=exc) + if not self.handshake_started_event.is_set(): + self.send_500_response() + else: + await self.handshake_completed_event.wait() + self.transport.close() + else: + self.closed_event.set() + if not self.handshake_started_event.is_set(): + msg = "ASGI callable returned without sending handshake." + self.logger.error(msg) + self.send_500_response() + self.transport.close() + elif result is not None: + msg = "ASGI callable should return None, but returned '%s'." + self.logger.error(msg, result) + await self.handshake_completed_event.wait() + self.transport.close() + + async def asgi_send(self, message: "ASGISendEvent") -> None: + message_type = message["type"] + + if not self.handshake_started_event.is_set(): + if message_type == "websocket.accept": + message = cast("WebSocketAcceptEvent", message) + self.logger.info( + '%s - "WebSocket %s" [accepted]', + self.scope["client"], + self.scope["path"], + ) + self.initial_response = None + self.accepted_subprotocol = cast( + Optional[Subprotocol], message.get("subprotocol") + ) + if "headers" in message: + self.extra_headers.extend( + # ASGI spec requires bytes + # But for compatibility we need to convert it to strings + (name.decode("latin-1"), value.decode("latin-1")) + for name, value in message["headers"] + ) + self.handshake_started_event.set() + + elif message_type == "websocket.close": + message = cast("WebSocketCloseEvent", message) + self.logger.info( + '%s - "WebSocket %s" 403', + self.scope["client"], + self.scope["path"], + ) + self.initial_response = (http.HTTPStatus.FORBIDDEN, [], b"") + self.handshake_started_event.set() + self.closed_event.set() + + else: + msg = ( + "Expected ASGI message 'websocket.accept' or 'websocket.close', " + "but got '%s'." 
+ ) + raise RuntimeError(msg % message_type) + + elif not self.closed_event.is_set(): + await self.handshake_completed_event.wait() + + if message_type == "websocket.send": + message = cast("WebSocketSendEvent", message) + bytes_data = message.get("bytes") + text_data = message.get("text") + data = text_data if bytes_data is None else bytes_data + await self.send(data) # type: ignore[arg-type] + + elif message_type == "websocket.close": + message = cast("WebSocketCloseEvent", message) + code = message.get("code", 1000) + reason = message.get("reason", "") or "" + await self.close(code, reason) + self.closed_event.set() + + else: + msg = ( + "Expected ASGI message 'websocket.send' or 'websocket.close'," + " but got '%s'." + ) + raise RuntimeError(msg % message_type) + + else: + msg = "Unexpected ASGI message '%s', after sending 'websocket.close'." + raise RuntimeError(msg % message_type) + + async def asgi_receive( + self, + ) -> Union[ + "WebSocketDisconnectEvent", "WebSocketConnectEvent", "WebSocketReceiveEvent" + ]: + if not self.connect_sent: + self.connect_sent = True + return {"type": "websocket.connect"} + + await self.handshake_completed_event.wait() + + if self.closed_event.is_set(): + # If client disconnected, use WebSocketServerProtocol.close_code property. + # If the handshake failed or the app closed before handshake completion, + # use 1006 Abnormal Closure. + return {"type": "websocket.disconnect", "code": self.close_code or 1006} + + try: + data = await self.recv() + except ConnectionClosed as exc: + self.closed_event.set() + return {"type": "websocket.disconnect", "code": exc.code} + + msg: WebSocketReceiveEvent = { # type: ignore[typeddict-item] + "type": "websocket.receive" + } + + if isinstance(data, str): + msg["text"] = data + else: + msg["bytes"] = data + + return msg diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/websockets/wsproto_impl.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/websockets/wsproto_impl.py new file mode 100644 index 00000000..eb8d860b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/protocols/websockets/wsproto_impl.py @@ -0,0 +1,322 @@ +import asyncio +import logging +from urllib.parse import unquote + +import h11 +import wsproto +from wsproto import ConnectionType, events +from wsproto.connection import ConnectionState +from wsproto.extensions import PerMessageDeflate +from wsproto.utilities import RemoteProtocolError + +from uvicorn.logging import TRACE_LOG_LEVEL +from uvicorn.protocols.utils import get_local_addr, get_remote_addr, is_ssl + + +class WSProtocol(asyncio.Protocol): + def __init__(self, config, server_state, _loop=None): + if not config.loaded: + config.load() + + self.config = config + self.app = config.loaded_app + self.loop = _loop or asyncio.get_event_loop() + self.logger = logging.getLogger("uvicorn.error") + self.root_path = config.root_path + + # Shared server state + self.connections = server_state.connections + self.tasks = server_state.tasks + + # Connection state + self.transport = None + self.server = None + self.client = None + self.scheme = None + + # WebSocket state + self.connect_event = None + self.queue = asyncio.Queue() + self.handshake_complete = False + self.close_sent = False + + self.conn = wsproto.WSConnection(connection_type=ConnectionType.SERVER) + + self.read_paused = False + self.writable = asyncio.Event() + self.writable.set() + + # Buffers + self.bytes = b"" + self.text = "" + + # Protocol 
interface + + def connection_made(self, transport): + self.connections.add(self) + self.transport = transport + self.server = get_local_addr(transport) + self.client = get_remote_addr(transport) + self.scheme = "wss" if is_ssl(transport) else "ws" + + if self.logger.level <= TRACE_LOG_LEVEL: + prefix = "%s:%d - " % tuple(self.client) if self.client else "" + self.logger.log(TRACE_LOG_LEVEL, "%sWebSocket connection made", prefix) + + def connection_lost(self, exc): + if exc is not None: + self.queue.put_nowait({"type": "websocket.disconnect"}) + self.connections.remove(self) + + if self.logger.level <= TRACE_LOG_LEVEL: + prefix = "%s:%d - " % tuple(self.client) if self.client else "" + self.logger.log(TRACE_LOG_LEVEL, "%sWebSocket connection lost", prefix) + + if exc is None: + self.transport.close() + + def eof_received(self): + pass + + def data_received(self, data): + try: + self.conn.receive_data(data) + except RemoteProtocolError as err: + if err.event_hint is not None: + self.transport.write(self.conn.send(err.event_hint)) + self.transport.close() + else: + self.handle_no_connect(events.CloseConnection()) + else: + self.handle_events() + + def handle_events(self): + for event in self.conn.events(): + if isinstance(event, events.Request): + self.handle_connect(event) + elif isinstance(event, events.TextMessage): + self.handle_text(event) + elif isinstance(event, events.BytesMessage): + self.handle_bytes(event) + elif isinstance(event, events.RejectConnection): + self.handle_no_connect(event) + elif isinstance(event, events.RejectData): + self.handle_no_connect(event) + elif isinstance(event, events.CloseConnection): + self.handle_close(event) + elif isinstance(event, events.Ping): + self.handle_ping(event) + + def pause_writing(self): + """ + Called by the transport when the write buffer exceeds the high water mark. + """ + self.writable.clear() + + def resume_writing(self): + """ + Called by the transport when the write buffer drops below the low water mark. 
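The writable Event above is the whole back-pressure mechanism: sends block while the transport buffer sits above the high-water mark. The idiom in isolation (an illustrative sketch, not uvicorn's code):

import asyncio

class GuardedWriter:
    def __init__(self, transport) -> None:
        self.transport = transport
        self.writable = asyncio.Event()
        self.writable.set()

    def pause_writing(self) -> None:   # buffer above the high-water mark
        self.writable.clear()

    def resume_writing(self) -> None:  # buffer drained below the low-water mark
        self.writable.set()

    async def send(self, data: bytes) -> None:
        await self.writable.wait()     # block while the peer reads slowly
        self.transport.write(data)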
+ """ + self.writable.set() + + def shutdown(self): + self.queue.put_nowait({"type": "websocket.disconnect", "code": 1012}) + output = self.conn.send(wsproto.events.CloseConnection(code=1012)) + self.transport.write(output) + self.transport.close() + + def on_task_complete(self, task): + self.tasks.discard(task) + + # Event handlers + + def handle_connect(self, event): + self.connect_event = event + headers = [(b"host", event.host.encode())] + headers += [(key.lower(), value) for key, value in event.extra_headers] + raw_path, _, query_string = event.target.partition("?") + self.scope = { + "type": "websocket", + "asgi": {"version": self.config.asgi_version, "spec_version": "2.3"}, + "http_version": "1.1", + "scheme": self.scheme, + "server": self.server, + "client": self.client, + "root_path": self.root_path, + "path": unquote(raw_path), + "raw_path": raw_path.encode("ascii"), + "query_string": query_string.encode("ascii"), + "headers": headers, + "subprotocols": event.subprotocols, + } + self.queue.put_nowait({"type": "websocket.connect"}) + task = self.loop.create_task(self.run_asgi()) + task.add_done_callback(self.on_task_complete) + self.tasks.add(task) + + def handle_no_connect(self, event): + headers = [ + (b"content-type", b"text/plain; charset=utf-8"), + (b"connection", b"close"), + ] + msg = h11.Response(status_code=400, headers=headers, reason="Bad Request") + output = self.conn.send(msg) + msg = h11.Data(data=event.reason.encode("utf-8")) + output += self.conn.send(msg) + msg = h11.EndOfMessage() + output += self.conn.send(msg) + self.transport.write(output) + self.transport.close() + + def handle_text(self, event): + self.text += event.data + if event.message_finished: + self.queue.put_nowait({"type": "websocket.receive", "text": self.text}) + self.text = "" + if not self.read_paused: + self.read_paused = True + self.transport.pause_reading() + + def handle_bytes(self, event): + self.bytes += event.data + # todo: we may want to guard the size of self.bytes and self.text + if event.message_finished: + self.queue.put_nowait({"type": "websocket.receive", "bytes": self.bytes}) + self.bytes = b"" + if not self.read_paused: + self.read_paused = True + self.transport.pause_reading() + + def handle_close(self, event): + if self.conn.state == ConnectionState.REMOTE_CLOSING: + self.transport.write(self.conn.send(event.response())) + self.queue.put_nowait({"type": "websocket.disconnect", "code": event.code}) + self.transport.close() + + def handle_ping(self, event): + self.transport.write(self.conn.send(event.response())) + + def send_500_response(self): + headers = [ + (b"content-type", b"text/plain; charset=utf-8"), + (b"connection", b"close"), + ] + if self.conn.connection is None: + output = self.conn.send(wsproto.events.RejectConnection(status_code=500)) + else: + msg = h11.Response( + status_code=500, headers=headers, reason="Internal Server Error" + ) + output = self.conn.send(msg) + msg = h11.Data(data=b"Internal Server Error") + output += self.conn.send(msg) + msg = h11.EndOfMessage() + output += self.conn.send(msg) + self.transport.write(output) + + async def run_asgi(self): + try: + result = await self.app(self.scope, self.receive, self.send) + except BaseException as exc: + msg = "Exception in ASGI application\n" + self.logger.error(msg, exc_info=exc) + if not self.handshake_complete: + self.send_500_response() + self.transport.close() + else: + if not self.handshake_complete: + msg = "ASGI callable returned without completing handshake." 
+ self.logger.error(msg) + self.send_500_response() + self.transport.close() + elif result is not None: + msg = "ASGI callable should return None, but returned '%s'." + self.logger.error(msg, result) + self.transport.close() + + async def send(self, message): + await self.writable.wait() + + message_type = message["type"] + + if not self.handshake_complete: + if message_type == "websocket.accept": + self.logger.info( + '%s - "WebSocket %s" [accepted]', + self.scope["client"], + self.scope["path"], + ) + self.handshake_complete = True + subprotocol = message.get("subprotocol") + extra_headers = message.get("headers", []) + extensions = [] + if self.config.ws_per_message_deflate: + extensions.append(PerMessageDeflate()) + output = self.conn.send( + wsproto.events.AcceptConnection( + subprotocol=subprotocol, + extensions=extensions, + extra_headers=extra_headers, + ) + ) + self.transport.write(output) + + elif message_type == "websocket.close": + self.queue.put_nowait({"type": "websocket.disconnect", "code": None}) + self.logger.info( + '%s - "WebSocket %s" 403', + self.scope["client"], + self.scope["path"], + ) + self.handshake_complete = True + self.close_sent = True + msg = events.RejectConnection(status_code=403, headers=[]) + output = self.conn.send(msg) + self.transport.write(output) + self.transport.close() + + else: + msg = ( + "Expected ASGI message 'websocket.accept' or 'websocket.close', " + "but got '%s'." + ) + raise RuntimeError(msg % message_type) + + elif not self.close_sent: + if message_type == "websocket.send": + bytes_data = message.get("bytes") + text_data = message.get("text") + data = text_data if bytes_data is None else bytes_data + output = self.conn.send(wsproto.events.Message(data=data)) + if not self.transport.is_closing(): + self.transport.write(output) + + elif message_type == "websocket.close": + self.close_sent = True + code = message.get("code", 1000) + reason = message.get("reason", "") or "" + self.queue.put_nowait({"type": "websocket.disconnect", "code": code}) + output = self.conn.send( + wsproto.events.CloseConnection(code=code, reason=reason) + ) + if not self.transport.is_closing(): + self.transport.write(output) + self.transport.close() + + else: + msg = ( + "Expected ASGI message 'websocket.send' or 'websocket.close'," + " but got '%s'." + ) + raise RuntimeError(msg % message_type) + + else: + msg = "Unexpected ASGI message '%s', after sending 'websocket.close'." 
+ raise RuntimeError(msg % message_type) + + async def receive(self): + message = await self.queue.get() + if self.read_paused and self.queue.empty(): + self.read_paused = False + self.transport.resume_reading() + return message diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/server.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/server.py new file mode 100644 index 00000000..494e2b65 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/server.py @@ -0,0 +1,306 @@ +import asyncio +import functools +import logging +import os +import platform +import signal +import socket +import sys +import threading +import time +from email.utils import formatdate +from types import FrameType +from typing import TYPE_CHECKING, List, Optional, Sequence, Set, Tuple, Union + +import click + +from uvicorn.config import Config + +if TYPE_CHECKING: + from uvicorn.protocols.http.h11_impl import H11Protocol + from uvicorn.protocols.http.httptools_impl import HttpToolsProtocol + from uvicorn.protocols.websockets.websockets_impl import WebSocketProtocol + from uvicorn.protocols.websockets.wsproto_impl import WSProtocol + + Protocols = Union[H11Protocol, HttpToolsProtocol, WSProtocol, WebSocketProtocol] + + +HANDLED_SIGNALS = ( + signal.SIGINT, # Unix signal 2. Sent by Ctrl+C. + signal.SIGTERM, # Unix signal 15. Sent by `kill `. +) + +logger = logging.getLogger("uvicorn.error") + + +class ServerState: + """ + Shared servers state that is available between all protocol instances. + """ + + def __init__(self) -> None: + self.total_requests = 0 + self.connections: Set["Protocols"] = set() + self.tasks: Set[asyncio.Task] = set() + self.default_headers: List[Tuple[bytes, bytes]] = [] + + +class Server: + def __init__(self, config: Config) -> None: + self.config = config + self.server_state = ServerState() + + self.started = False + self.should_exit = False + self.force_exit = False + self.last_notified = 0.0 + + def run(self, sockets: Optional[List[socket.socket]] = None) -> None: + self.config.setup_event_loop() + return asyncio.run(self.serve(sockets=sockets)) + + async def serve(self, sockets: Optional[List[socket.socket]] = None) -> None: + process_id = os.getpid() + + config = self.config + if not config.loaded: + config.load() + + self.lifespan = config.lifespan_class(config) + + self.install_signal_handlers() + + message = "Started server process [%d]" + color_message = "Started server process [" + click.style("%d", fg="cyan") + "]" + logger.info(message, process_id, extra={"color_message": color_message}) + + await self.startup(sockets=sockets) + if self.should_exit: + return + await self.main_loop() + await self.shutdown(sockets=sockets) + + message = "Finished server process [%d]" + color_message = "Finished server process [" + click.style("%d", fg="cyan") + "]" + logger.info(message, process_id, extra={"color_message": color_message}) + + async def startup(self, sockets: list = None) -> None: + await self.lifespan.startup() + if self.lifespan.should_exit: + self.should_exit = True + return + + config = self.config + + create_protocol = functools.partial( + config.http_protocol_class, config=config, server_state=self.server_state + ) + loop = asyncio.get_running_loop() + + listeners: Sequence[socket.SocketType] + if sockets is not None: + # Explicitly passed a list of open sockets. + # We use this when the server is run from a Gunicorn worker. 
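A pre-bound socket like this can come from any parent process and be handed to Server.serve(sockets=[...]). A minimal sketch of such a setup (host, port, and backlog values are hypothetical):

import socket

def bind_socket(host: str = "127.0.0.1", port: int = 8000) -> socket.socket:
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind((host, port))
    sock.listen(2048)
    sock.set_inheritable(True)  # let worker processes inherit the fd
    return sock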
+ + def _share_socket(sock: socket.SocketType) -> socket.SocketType: + # Windows requires the socket be explicitly shared across + # multiple workers (processes). + from socket import fromshare # type: ignore + + sock_data = sock.share(os.getpid()) # type: ignore + return fromshare(sock_data) + + self.servers = [] + for sock in sockets: + if config.workers > 1 and platform.system() == "Windows": + sock = _share_socket(sock) + server = await loop.create_server( + create_protocol, sock=sock, ssl=config.ssl, backlog=config.backlog + ) + self.servers.append(server) + listeners = sockets + + elif config.fd is not None: + # Use an existing socket, from a file descriptor. + sock = socket.fromfd(config.fd, socket.AF_UNIX, socket.SOCK_STREAM) + server = await loop.create_server( + create_protocol, sock=sock, ssl=config.ssl, backlog=config.backlog + ) + assert server.sockets is not None # mypy + listeners = server.sockets + self.servers = [server] + + elif config.uds is not None: + # Create a socket using UNIX domain socket. + uds_perms = 0o666 + if os.path.exists(config.uds): + uds_perms = os.stat(config.uds).st_mode + server = await loop.create_unix_server( + create_protocol, path=config.uds, ssl=config.ssl, backlog=config.backlog + ) + os.chmod(config.uds, uds_perms) + assert server.sockets is not None # mypy + listeners = server.sockets + self.servers = [server] + + else: + # Standard case. Create a socket from a host/port pair. + try: + server = await loop.create_server( + create_protocol, + host=config.host, + port=config.port, + ssl=config.ssl, + backlog=config.backlog, + ) + except OSError as exc: + logger.error(exc) + await self.lifespan.shutdown() + sys.exit(1) + + assert server.sockets is not None + listeners = server.sockets + self.servers = [server] + + if sockets is None: + self._log_started_message(listeners) + else: + # We're most likely running multiple workers, so a message has already been + # logged by `config.bind_socket()`. + pass + + self.started = True + + def _log_started_message(self, listeners: Sequence[socket.SocketType]) -> None: + config = self.config + + if config.fd is not None: + sock = listeners[0] + logger.info( + "Uvicorn running on socket %s (Press CTRL+C to quit)", + sock.getsockname(), + ) + + elif config.uds is not None: + logger.info( + "Uvicorn running on unix socket %s (Press CTRL+C to quit)", config.uds + ) + + else: + addr_format = "%s://%s:%d" + host = "0.0.0.0" if config.host is None else config.host + if ":" in host: + # It's an IPv6 address. + addr_format = "%s://[%s]:%d" + + port = config.port + if port == 0: + port = listeners[0].getsockname()[1] + + protocol_name = "https" if config.ssl else "http" + message = f"Uvicorn running on {addr_format} (Press CTRL+C to quit)" + color_message = ( + "Uvicorn running on " + + click.style(addr_format, bold=True) + + " (Press CTRL+C to quit)" + ) + logger.info( + message, + protocol_name, + host, + port, + extra={"color_message": color_message}, + ) + + async def main_loop(self) -> None: + counter = 0 + should_exit = await self.on_tick(counter) + while not should_exit: + counter += 1 + counter = counter % 864000 + await asyncio.sleep(0.1) + should_exit = await self.on_tick(counter) + + async def on_tick(self, counter: int) -> bool: + # Update the default headers, once per second. 
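The default headers refreshed here mainly carry the Date value, precomputed so per-request code can reuse the already-encoded bytes. The formatdate() call used just below, in isolation:

import time
from email.utils import formatdate

date_header = (b"date", formatdate(time.time(), usegmt=True).encode())
# e.g. (b"date", b"Tue, 01 Mar 2022 12:00:00 GMT")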
+ if counter % 10 == 0: + current_time = time.time() + current_date = formatdate(current_time, usegmt=True).encode() + + if self.config.date_header: + date_header = [(b"date", current_date)] + else: + date_header = [] + + self.server_state.default_headers = ( + date_header + self.config.encoded_headers + ) + + # Callback to `callback_notify` once every `timeout_notify` seconds. + if self.config.callback_notify is not None: + if current_time - self.last_notified > self.config.timeout_notify: + self.last_notified = current_time + await self.config.callback_notify() + + # Determine if we should exit. + if self.should_exit: + return True + if self.config.limit_max_requests is not None: + return self.server_state.total_requests >= self.config.limit_max_requests + return False + + async def shutdown(self, sockets: Optional[List[socket.socket]] = None) -> None: + logger.info("Shutting down") + + # Stop accepting new connections. + for server in self.servers: + server.close() + for sock in sockets or []: + sock.close() + for server in self.servers: + await server.wait_closed() + + # Request shutdown on all existing connections. + for connection in list(self.server_state.connections): + connection.shutdown() + await asyncio.sleep(0.1) + + # Wait for existing connections to finish sending responses. + if self.server_state.connections and not self.force_exit: + msg = "Waiting for connections to close. (CTRL+C to force quit)" + logger.info(msg) + while self.server_state.connections and not self.force_exit: + await asyncio.sleep(0.1) + + # Wait for existing tasks to complete. + if self.server_state.tasks and not self.force_exit: + msg = "Waiting for background tasks to complete. (CTRL+C to force quit)" + logger.info(msg) + while self.server_state.tasks and not self.force_exit: + await asyncio.sleep(0.1) + + # Send the lifespan shutdown event, and wait for application shutdown. + if not self.force_exit: + await self.lifespan.shutdown() + + def install_signal_handlers(self) -> None: + if threading.current_thread() is not threading.main_thread(): + # Signals can only be listened to from the main thread. 
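The registration below prefers the event-loop API and falls back to signal.signal() where it is not implemented. The same dual strategy as a standalone sketch:

import asyncio
import signal

def install(loop: asyncio.AbstractEventLoop, handler) -> None:
    for sig in (signal.SIGINT, signal.SIGTERM):
        try:
            loop.add_signal_handler(sig, handler, sig, None)
        except NotImplementedError:  # e.g. the Windows event loops
            signal.signal(sig, handler)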
+ return + + loop = asyncio.get_event_loop() + + try: + for sig in HANDLED_SIGNALS: + loop.add_signal_handler(sig, self.handle_exit, sig, None) + except NotImplementedError: # pragma: no cover + # Windows + for sig in HANDLED_SIGNALS: + signal.signal(sig, self.handle_exit) + + def handle_exit(self, sig: int, frame: Optional[FrameType]) -> None: + + if self.should_exit and sig == signal.SIGINT: + self.force_exit = True + else: + self.should_exit = True diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/__init__.py new file mode 100644 index 00000000..deaf12ed --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/__init__.py @@ -0,0 +1,21 @@ +from typing import TYPE_CHECKING, Type + +from uvicorn.supervisors.basereload import BaseReload +from uvicorn.supervisors.multiprocess import Multiprocess + +if TYPE_CHECKING: + ChangeReload: Type[BaseReload] +else: + try: + from uvicorn.supervisors.watchfilesreload import ( + WatchFilesReload as ChangeReload, + ) + except ImportError: # pragma: no cover + try: + from uvicorn.supervisors.watchgodreload import ( + WatchGodReload as ChangeReload, + ) + except ImportError: + from uvicorn.supervisors.statreload import StatReload as ChangeReload + +__all__ = ["Multiprocess", "ChangeReload"] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/basereload.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/basereload.py new file mode 100644 index 00000000..b2d80d3e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/basereload.py @@ -0,0 +1,113 @@ +import logging +import os +import signal +import threading +from pathlib import Path +from socket import socket +from types import FrameType +from typing import Callable, Iterator, List, Optional + +import click + +from uvicorn._subprocess import get_subprocess +from uvicorn.config import Config + +HANDLED_SIGNALS = ( + signal.SIGINT, # Unix signal 2. Sent by Ctrl+C. + signal.SIGTERM, # Unix signal 15. Sent by `kill `. +) + +logger = logging.getLogger("uvicorn.error") + + +class BaseReload: + def __init__( + self, + config: Config, + target: Callable[[Optional[List[socket]]], None], + sockets: List[socket], + ) -> None: + self.config = config + self.target = target + self.sockets = sockets + self.should_exit = threading.Event() + self.pid = os.getpid() + self.reloader_name: Optional[str] = None + + def signal_handler(self, sig: int, frame: Optional[FrameType]) -> None: + """ + A signal handler that is registered with the parent process. + """ + self.should_exit.set() + + def run(self) -> None: + self.startup() + for changes in self: + if changes: + logger.warning( + "%s detected changes in %s. 
Reloading...", + self.reloader_name, + ", ".join(map(_display_path, changes)), + ) + self.restart() + + self.shutdown() + + def pause(self) -> None: + if self.should_exit.wait(self.config.reload_delay): + raise StopIteration() + + def __iter__(self) -> Iterator[Optional[List[Path]]]: + return self + + def __next__(self) -> Optional[List[Path]]: + return self.should_restart() + + def startup(self) -> None: + message = f"Started reloader process [{self.pid}] using {self.reloader_name}" + color_message = "Started reloader process [{}] using {}".format( + click.style(str(self.pid), fg="cyan", bold=True), + click.style(str(self.reloader_name), fg="cyan", bold=True), + ) + logger.info(message, extra={"color_message": color_message}) + + for sig in HANDLED_SIGNALS: + signal.signal(sig, self.signal_handler) + + self.process = get_subprocess( + config=self.config, target=self.target, sockets=self.sockets + ) + self.process.start() + + def restart(self) -> None: + + self.process.terminate() + self.process.join() + + self.process = get_subprocess( + config=self.config, target=self.target, sockets=self.sockets + ) + self.process.start() + + def shutdown(self) -> None: + self.process.terminate() + self.process.join() + + for sock in self.sockets: + sock.close() + + message = "Stopping reloader process [{}]".format(str(self.pid)) + color_message = "Stopping reloader process [{}]".format( + click.style(str(self.pid), fg="cyan", bold=True) + ) + logger.info(message, extra={"color_message": color_message}) + + def should_restart(self) -> Optional[List[Path]]: + raise NotImplementedError("Reload strategies should override should_restart()") + + +def _display_path(path: Path) -> str: + try: + return f"'{path.relative_to(Path.cwd())}'" + except ValueError: + return f"'{path}'" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/multiprocess.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/multiprocess.py new file mode 100644 index 00000000..9ab4a742 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/multiprocess.py @@ -0,0 +1,74 @@ +import logging +import os +import signal +import threading +from multiprocessing.context import SpawnProcess +from socket import socket +from types import FrameType +from typing import Callable, List, Optional + +import click + +from uvicorn._subprocess import get_subprocess +from uvicorn.config import Config + +HANDLED_SIGNALS = ( + signal.SIGINT, # Unix signal 2. Sent by Ctrl+C. + signal.SIGTERM, # Unix signal 15. Sent by `kill `. +) + +logger = logging.getLogger("uvicorn.error") + + +class Multiprocess: + def __init__( + self, + config: Config, + target: Callable[[Optional[List[socket]]], None], + sockets: List[socket], + ) -> None: + self.config = config + self.target = target + self.sockets = sockets + self.processes: List[SpawnProcess] = [] + self.should_exit = threading.Event() + self.pid = os.getpid() + + def signal_handler(self, sig: int, frame: Optional[FrameType]) -> None: + """ + A signal handler that is registered with the parent process. 
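Concrete reload strategies only have to override should_restart() from the BaseReload class above. A toy subclass sketch (MarkerFileReload and the ".reload" marker file are hypothetical, for illustration only):

from pathlib import Path
from typing import List, Optional

from uvicorn.supervisors.basereload import BaseReload

class MarkerFileReload(BaseReload):
    def __init__(self, config, target, sockets) -> None:
        super().__init__(config, target, sockets)
        self.reloader_name = "MarkerFileReload"
        self.marker = Path(".reload")

    def should_restart(self) -> Optional[List[Path]]:
        self.pause()  # sleeps reload_delay; raises StopIteration on shutdown
        if self.marker.exists():
            self.marker.unlink()
            return [self.marker]
        return None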
+ """ + self.should_exit.set() + + def run(self) -> None: + self.startup() + self.should_exit.wait() + self.shutdown() + + def startup(self) -> None: + message = "Started parent process [{}]".format(str(self.pid)) + color_message = "Started parent process [{}]".format( + click.style(str(self.pid), fg="cyan", bold=True) + ) + logger.info(message, extra={"color_message": color_message}) + + for sig in HANDLED_SIGNALS: + signal.signal(sig, self.signal_handler) + + for idx in range(self.config.workers): + process = get_subprocess( + config=self.config, target=self.target, sockets=self.sockets + ) + process.start() + self.processes.append(process) + + def shutdown(self) -> None: + for process in self.processes: + process.terminate() + process.join() + + message = "Stopping parent process [{}]".format(str(self.pid)) + color_message = "Stopping parent process [{}]".format( + click.style(str(self.pid), fg="cyan", bold=True) + ) + logger.info(message, extra={"color_message": color_message}) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/statreload.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/statreload.py new file mode 100644 index 00000000..7cbcce31 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/statreload.py @@ -0,0 +1,53 @@ +import logging +from pathlib import Path +from socket import socket +from typing import Callable, Dict, Iterator, List, Optional + +from uvicorn.config import Config +from uvicorn.supervisors.basereload import BaseReload + +logger = logging.getLogger("uvicorn.error") + + +class StatReload(BaseReload): + def __init__( + self, + config: Config, + target: Callable[[Optional[List[socket]]], None], + sockets: List[socket], + ) -> None: + super().__init__(config, target, sockets) + self.reloader_name = "StatReload" + self.mtimes: Dict[Path, float] = {} + + if config.reload_excludes or config.reload_includes: + logger.warning( + "--reload-include and --reload-exclude have no effect unless " + "watchfiles is installed." 
+ ) + + def should_restart(self) -> Optional[List[Path]]: + self.pause() + + for file in self.iter_py_files(): + try: + mtime = file.stat().st_mtime + except OSError: # pragma: nocover + continue + + old_time = self.mtimes.get(file) + if old_time is None: + self.mtimes[file] = mtime + continue + elif mtime > old_time: + return [file] + return None + + def restart(self) -> None: + self.mtimes = {} + return super().restart() + + def iter_py_files(self) -> Iterator[Path]: + for reload_dir in self.config.reload_dirs: + for path in list(reload_dir.rglob("*.py")): + yield path.resolve() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/watchfilesreload.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/watchfilesreload.py new file mode 100644 index 00000000..9ba10ce9 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/watchfilesreload.py @@ -0,0 +1,89 @@ +from pathlib import Path +from socket import socket +from typing import Callable, List, Optional + +from watchfiles import watch + +from uvicorn.config import Config +from uvicorn.supervisors.basereload import BaseReload + + +class FileFilter: + def __init__(self, config: Config): + default_includes = ["*.py"] + self.includes = [ + default + for default in default_includes + if default not in config.reload_excludes + ] + self.includes.extend(config.reload_includes) + self.includes = list(set(self.includes)) + + default_excludes = [".*", ".py[cod]", ".sw.*", "~*"] + self.excludes = [ + default + for default in default_excludes + if default not in config.reload_includes + ] + self.exclude_dirs = [] + for e in config.reload_excludes: + p = Path(e) + try: + is_dir = p.is_dir() + except OSError: # pragma: no cover + # gets raised on Windows for values like "*.py" + is_dir = False + + if is_dir: + self.exclude_dirs.append(p) + else: + self.excludes.append(e) + self.excludes = list(set(self.excludes)) + + def __call__(self, path: Path) -> bool: + for include_pattern in self.includes: + if path.match(include_pattern): + for exclude_dir in self.exclude_dirs: + if exclude_dir in path.parents: + return False + + for exclude_pattern in self.excludes: + if path.match(exclude_pattern): + return False + + return True + return False + + +class WatchFilesReload(BaseReload): + def __init__( + self, + config: Config, + target: Callable[[Optional[List[socket]]], None], + sockets: List[socket], + ) -> None: + super().__init__(config, target, sockets) + self.reloader_name = "WatchFiles" + self.reload_dirs = [] + for directory in config.reload_dirs: + if Path.cwd() not in directory.parents: + self.reload_dirs.append(directory) + if Path.cwd() not in self.reload_dirs: + self.reload_dirs.append(Path.cwd()) + + self.watch_filter = FileFilter(config) + self.watcher = watch( + *self.reload_dirs, + watch_filter=None, + stop_event=self.should_exit, + # using yield_on_timeout here mostly to make sure tests don't + # hang forever, won't affect the class's behavior + yield_on_timeout=True, + ) + + def should_restart(self) -> Optional[List[Path]]: + changes = next(self.watcher) + if changes: + unique_paths = {Path(c[1]) for c in changes} + return [p for p in unique_paths if self.watch_filter(p)] + return None diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/watchgodreload.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/watchgodreload.py new file mode 100644 index 00000000..1ec3dc59 --- 
/dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/supervisors/watchgodreload.py @@ -0,0 +1,158 @@ +import logging +import warnings +from pathlib import Path +from socket import socket +from typing import TYPE_CHECKING, Callable, Dict, List, Optional + +from watchgod import DefaultWatcher + +from uvicorn.config import Config +from uvicorn.supervisors.basereload import BaseReload + +if TYPE_CHECKING: + import os + + DirEntry = os.DirEntry[str] + +logger = logging.getLogger("uvicorn.error") + + +class CustomWatcher(DefaultWatcher): + def __init__(self, root_path: Path, config: Config): + default_includes = ["*.py"] + self.includes = [ + default + for default in default_includes + if default not in config.reload_excludes + ] + self.includes.extend(config.reload_includes) + self.includes = list(set(self.includes)) + + default_excludes = [".*", ".py[cod]", ".sw.*", "~*"] + self.excludes = [ + default + for default in default_excludes + if default not in config.reload_includes + ] + self.excludes.extend(config.reload_excludes) + self.excludes = list(set(self.excludes)) + + self.watched_dirs: Dict[str, bool] = {} + self.watched_files: Dict[str, bool] = {} + self.dirs_includes = set(config.reload_dirs) + self.dirs_excludes = set(config.reload_dirs_excludes) + self.resolved_root = root_path + super().__init__(str(root_path)) + + def should_watch_file(self, entry: "DirEntry") -> bool: + cached_result = self.watched_files.get(entry.path) + if cached_result is not None: + return cached_result + + entry_path = Path(entry) + + # cwd is not verified through should_watch_dir, so we need to verify here + if entry_path.parent == Path.cwd() and not Path.cwd() in self.dirs_includes: + self.watched_files[entry.path] = False + return False + for include_pattern in self.includes: + if entry_path.match(include_pattern): + for exclude_pattern in self.excludes: + if entry_path.match(exclude_pattern): + self.watched_files[entry.path] = False + return False + self.watched_files[entry.path] = True + return True + self.watched_files[entry.path] = False + return False + + def should_watch_dir(self, entry: "DirEntry") -> bool: + cached_result = self.watched_dirs.get(entry.path) + if cached_result is not None: + return cached_result + + entry_path = Path(entry) + + if entry_path in self.dirs_excludes: + self.watched_dirs[entry.path] = False + return False + + for exclude_pattern in self.excludes: + if entry_path.match(exclude_pattern): + is_watched = False + if entry_path in self.dirs_includes: + is_watched = True + + for directory in self.dirs_includes: + if directory in entry_path.parents: + is_watched = True + + if is_watched: + logger.debug( + "WatchGodReload detected a new excluded dir '%s' in '%s'; " + "Adding to exclude list.", + entry_path.relative_to(self.resolved_root), + str(self.resolved_root), + ) + self.watched_dirs[entry.path] = False + self.dirs_excludes.add(entry_path) + return False + + if entry_path in self.dirs_includes: + self.watched_dirs[entry.path] = True + return True + + for directory in self.dirs_includes: + if directory in entry_path.parents: + self.watched_dirs[entry.path] = True + return True + + for include_pattern in self.includes: + if entry_path.match(include_pattern): + logger.info( + "WatchGodReload detected a new reload dir '%s' in '%s'; " + "Adding to watch list.", + str(entry_path.relative_to(self.resolved_root)), + str(self.resolved_root), + ) + self.dirs_includes.add(entry_path) + self.watched_dirs[entry.path] = True + return True + + 
+        self.watched_dirs[entry.path] = False
+        return False
+
+
+class WatchGodReload(BaseReload):
+    def __init__(
+        self,
+        config: Config,
+        target: Callable[[Optional[List[socket]]], None],
+        sockets: List[socket],
+    ) -> None:
+        warnings.warn(
+            '"watchgod" is deprecated, you should switch '
+            "to watchfiles (`pip install watchfiles`).",
+            DeprecationWarning,
+        )
+        super().__init__(config, target, sockets)
+        self.reloader_name = "WatchGod"
+        self.watchers = []
+        reload_dirs = []
+        for directory in config.reload_dirs:
+            if Path.cwd() not in directory.parents:
+                reload_dirs.append(directory)
+        if Path.cwd() not in reload_dirs:
+            reload_dirs.append(Path.cwd())
+        for w in reload_dirs:
+            self.watchers.append(CustomWatcher(w.resolve(), self.config))
+
+    def should_restart(self) -> Optional[List[Path]]:
+        self.pause()
+
+        for watcher in self.watchers:
+            change = watcher.check()
+            if change != set():
+                return list({Path(c[1]) for c in change})
+
+        return None
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/workers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/workers.py
new file mode 100644
index 00000000..f28c3a8d
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/uvicorn/workers.py
@@ -0,0 +1,90 @@
+import asyncio
+import logging
+import signal
+import sys
+from typing import Any
+
+from gunicorn.arbiter import Arbiter
+from gunicorn.workers.base import Worker
+
+from uvicorn.config import Config
+from uvicorn.main import Server
+
+
+class UvicornWorker(Worker):
+    """
+    A worker class for Gunicorn that interfaces with an ASGI consumer callable,
+    rather than a WSGI callable.
+    """
+
+    CONFIG_KWARGS = {"loop": "auto", "http": "auto"}
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super(UvicornWorker, self).__init__(*args, **kwargs)
+
+        logger = logging.getLogger("uvicorn.error")
+        logger.handlers = self.log.error_log.handlers
+        logger.setLevel(self.log.error_log.level)
+        logger.propagate = False
+
+        logger = logging.getLogger("uvicorn.access")
+        logger.handlers = self.log.access_log.handlers
+        logger.setLevel(self.log.access_log.level)
+        logger.propagate = False
+
+        config_kwargs: dict = {
+            "app": None,
+            "log_config": None,
+            "timeout_keep_alive": self.cfg.keepalive,
+            "timeout_notify": self.timeout,
+            "callback_notify": self.callback_notify,
+            "limit_max_requests": self.max_requests,
+            "forwarded_allow_ips": self.cfg.forwarded_allow_ips,
+        }
+
+        if self.cfg.is_ssl:
+            ssl_kwargs = {
+                "ssl_keyfile": self.cfg.ssl_options.get("keyfile"),
+                "ssl_certfile": self.cfg.ssl_options.get("certfile"),
+                "ssl_keyfile_password": self.cfg.ssl_options.get("password"),
+                "ssl_version": self.cfg.ssl_options.get("ssl_version"),
+                "ssl_cert_reqs": self.cfg.ssl_options.get("cert_reqs"),
+                "ssl_ca_certs": self.cfg.ssl_options.get("ca_certs"),
+                "ssl_ciphers": self.cfg.ssl_options.get("ciphers"),
+            }
+            config_kwargs.update(ssl_kwargs)
+
+        if self.cfg.settings["backlog"].value:
+            config_kwargs["backlog"] = self.cfg.settings["backlog"].value
+
+        config_kwargs.update(self.CONFIG_KWARGS)
+
+        self.config = Config(**config_kwargs)
+
+    def init_process(self) -> None:
+        self.config.setup_event_loop()
+        super(UvicornWorker, self).init_process()
+
+    def init_signals(self) -> None:
+        # Reset signals so Gunicorn doesn't swallow subprocess return codes
+        # other signals are set up by Server.install_signal_handlers()
+        # See: https://github.com/encode/uvicorn/issues/894
+        for s in self.SIGNALS:
+            signal.signal(s, signal.SIG_DFL)
+
async def _serve(self) -> None: + self.config.app = self.wsgi + server = Server(config=self.config) + await server.serve(sockets=self.sockets) + if not server.started: + sys.exit(Arbiter.WORKER_BOOT_ERROR) + + def run(self) -> None: + return asyncio.run(self._serve()) + + async def callback_notify(self) -> None: + self.notify() + + +class UvicornH11Worker(UvicornWorker): + CONFIG_KWARGS = {"loop": "asyncio", "http": "h11"} diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/METADATA new file mode 100644 index 00000000..440fb3df --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/METADATA @@ -0,0 +1,147 @@ +Metadata-Version: 2.1 +Name: watchfiles +Version: 0.16.1 +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: MacOS +Classifier: Environment :: MacOS X +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Filesystems +Classifier: Framework :: AnyIO +Requires-Dist: anyio>=3.0.0,<4 +License-File: LICENSE +Summary: Simple, modern and high performance file watching and code reload in python. +Author-email: Samuel Colvin +Requires-Python: >=3.7 +Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM +Project-URL: Source, https://github.com/samuelcolvin/watchfiles +Project-URL: Documentation, https://watchfiles.helpmanual.io +Project-URL: Homepage, https://github.com/samuelcolvin/watchfiles +Project-URL: Changelog, https://github.com/samuelcolvin/watchfiles/releases +Project-URL: Funding, https://github.com/sponsors/samuelcolvin + +# watchfiles + +[![CI](https://github.com/samuelcolvin/watchfiles/workflows/ci/badge.svg?event=push)](https://github.com/samuelcolvin/watchfiles/actions?query=event%3Apush+branch%3Amain+workflow%3Aci) +[![Coverage](https://codecov.io/gh/samuelcolvin/watchfiles/branch/main/graph/badge.svg)](https://codecov.io/gh/samuelcolvin/watchfiles) +[![pypi](https://img.shields.io/pypi/v/watchfiles.svg)](https://pypi.python.org/pypi/watchfiles) +[![license](https://img.shields.io/github/license/samuelcolvin/watchfiles.svg)](https://github.com/samuelcolvin/watchfiles/blob/main/LICENSE) + +Simple, modern and high performance file watching and code reload in python. 
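The `UvicornWorker` and `UvicornH11Worker` classes from `uvicorn/workers.py` above are not launched directly; gunicorn selects them via its `-k`/`--worker-class` option. A typical invocation, assuming an ASGI app exported as `app` from an illustrative `main` module:

```bash
gunicorn main:app -w 4 -k uvicorn.workers.UvicornWorker
```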
+ +--- + +**Documentation**: [watchfiles.helpmanual.io](https://watchfiles.helpmanual.io) + +**Source Code**: [github.com/samuelcolvin/watchfiles](https://github.com/samuelcolvin/watchfiles) + +--- + +Underlying file system notifications are handled by the [Notify](https://github.com/notify-rs/notify) rust library. + +This package was previously named "watchgod", +see [the migration guide](https://watchfiles.helpmanual.io/migrating/) for more information. + +## Installation + +**watchfiles** requires Python 3.7 - 3.10. + +```bash +pip install watchfiles +``` + +Binaries are available for: + +* **Linux**: `x86_64`, `aarch64`, `i686`, `armv7l`, `musl-x86_64` & `musl-aarch64` +* **MacOS**: `x86_64` & `arm64` (except python 3.7) +* **Windows**: `amd64` & `win32` + +Otherwise, you can install from source which requires Rust stable to be installed. + +## Usage + +Here are some examples of what **watchfiles** can do: + +### `watch` Usage + +```py +from watchfiles import watch + +for changes in watch('./path/to/dir'): + print(changes) +``` +See [`watch` docs](https://watchfiles.helpmanual.io/api/watch/#watchfiles.watch) for more details. + +### `awatch` Usage + +```py +import asyncio +from watchfiles import awatch + +async def main(): + async for changes in awatch('/path/to/dir'): + print(changes) + +asyncio.run(main()) +``` +See [`awatch` docs](https://watchfiles.helpmanual.io/api/watch/#watchfiles.awatch) for more details. + +### `run_process` Usage + +```py +from watchfiles import run_process + +def foobar(a, b, c): + ... + +if __name__ == '__main__': + run_process('./path/to/dir', target=foobar, args=(1, 2, 3)) +``` +See [`run_process` docs](https://watchfiles.helpmanual.io/api/run_process/#watchfiles.run_process) for more details. + +### `arun_process` Usage + +```py +import asyncio +from watchfiles import arun_process + +def foobar(a, b, c): + ... + +async def main(): + await arun_process('./path/to/dir', target=foobar, args=(1, 2, 3)) + +if __name__ == '__main__': + asyncio.run(main()) +``` +See [`arun_process` docs](https://watchfiles.helpmanual.io/api/run_process/#watchfiles.arun_process) for more details. + +## CLI + +**watchfiles** also comes with a CLI for running and reloading code. To run `some command` when files in `src` change: + +``` +watchfiles "some command" src +``` + +For more information, see [the CLI docs](https://watchfiles.helpmanual.io/cli/). 
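These flags can be combined in a single invocation. For example, to restart `pytest --lf` only when Python files under `src` or `tests` change, ignoring a local `env` directory, an invocation along these lines should work (paths here are illustrative):

```bash
watchfiles --filter python --ignore-paths env 'pytest --lf' src tests
```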
+ +Or run + +```bash +watchfiles --help +``` + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/RECORD new file mode 100644 index 00000000..30618337 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/RECORD @@ -0,0 +1,24 @@ +../../Scripts/watchfiles.exe,sha256=QMfA3FK0FohZfOOXT3n-LhY11W6YLdkWQ3IlhZhVqSc,106373 +watchfiles-0.16.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +watchfiles-0.16.1.dist-info/METADATA,sha256=WAH-hzuRX5vBZW4GH3HiTH_oLKSYiqURqB5iwl540go,4766 +watchfiles-0.16.1.dist-info/RECORD,, +watchfiles-0.16.1.dist-info/WHEEL,sha256=HxhXltn1XXIfoc5D9ilfTZkcDZBkaaDcRoY57WxHST8,95 +watchfiles-0.16.1.dist-info/entry_points.txt,sha256=s1Dpa2d_KKBy-jKREWW60Z3GoRZ3JpCEo_9iYDt6hOQ,48 +watchfiles-0.16.1.dist-info/license_files/LICENSE,sha256=OJNLTJ74C1LXy3zC9PxI5qMeL3k2sWtQawqK7bVd5iM,1131 +watchfiles/__init__.py,sha256=LKmDAbDbm78xfyhVQ-W_6TWfT1Cg53NUaHqNhwu2plo,381 +watchfiles/__main__.py,sha256=g9iBs8xxX6Yik7cmgllQkpBN8C4JNoZVsEOyCCLCyFU,63 +watchfiles/__pycache__/__init__.cpython-37.pyc,, +watchfiles/__pycache__/__main__.cpython-37.pyc,, +watchfiles/__pycache__/cli.cpython-37.pyc,, +watchfiles/__pycache__/filters.cpython-37.pyc,, +watchfiles/__pycache__/main.cpython-37.pyc,, +watchfiles/__pycache__/run.cpython-37.pyc,, +watchfiles/__pycache__/version.cpython-37.pyc,, +watchfiles/_rust_notify.pyd,sha256=TNFOOX4FqecXQT3qv16y-wVofCTbkhxstoUHBYDETMo,589824 +watchfiles/_rust_notify.pyi,sha256=7_SUkU-l5qcwllflsB_qa7pqPK9UZ0v5KPxlRqSb0Ho,4497 +watchfiles/cli.py,sha256=W0FsNws4RAujae4QfmoUzrz1pLeCUXh0mmHlC0AqdcQ,7207 +watchfiles/filters.py,sha256=56UwipWyHmOy9oWBQ0K9yEGcJMe-Xesf2W0njKeWtIk,5240 +watchfiles/main.py,sha256=XbwiubVOBgjCveTa4ny7qJnDR8G3q7gWdyZcveAlti8,11789 +watchfiles/py.typed,sha256=5_E2MNyI4Mzg8TH53HFdXg0iSevlmHpdZSNnJVUYbMg,70 +watchfiles/run.py,sha256=m9mWrHSbQTp9g6nNdN5JMI4GMl3MtRkk8QiWbGbobkA,13961 +watchfiles/version.py,sha256=dbZJrjIgVt2QedB7aWTu41SUWaPDjUCabWBao2Ow1SU,90 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/WHEEL b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/WHEEL new file mode 100644 index 00000000..df3f3487 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: maturin (0.13.0) +Root-Is-Purelib: false +Tag: cp37-abi3-win_amd64 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/entry_points.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/entry_points.txt new file mode 100644 index 00000000..51642969 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +watchfiles=watchfiles.cli:cli diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/license_files/LICENSE b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/license_files/LICENSE new file mode 100644 index 00000000..08c9a8d3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles-0.16.1.dist-info/license_files/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017, 
2018, 2019, 2020, 2021, 2022 Samuel Colvin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/__init__.py new file mode 100644 index 00000000..877fbd57 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/__init__.py @@ -0,0 +1,17 @@ +from .filters import BaseFilter, DefaultFilter, PythonFilter +from .main import Change, awatch, watch +from .run import arun_process, run_process +from .version import VERSION + +__version__ = VERSION +__all__ = ( + 'watch', + 'awatch', + 'run_process', + 'arun_process', + 'Change', + 'BaseFilter', + 'DefaultFilter', + 'PythonFilter', + 'VERSION', +) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/__main__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/__main__.py new file mode 100644 index 00000000..d396c2a7 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/__main__.py @@ -0,0 +1,4 @@ +from .cli import cli + +if __name__ == '__main__': + cli() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/_rust_notify.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/_rust_notify.pyd new file mode 100644 index 00000000..b95a16fb Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/_rust_notify.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/_rust_notify.pyi b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/_rust_notify.pyi new file mode 100644 index 00000000..cedeefc4 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/_rust_notify.pyi @@ -0,0 +1,100 @@ +from typing import Any, List, Literal, Optional, Protocol, Set, Tuple, Union + +__all__ = 'RustNotify', 'WatchfilesRustInternalError' + +__version__: str +"""The package version as defined in `Cargo.toml`, modified to match python's versioning semantics.""" + +class AbstractEvent(Protocol): + def is_set(self) -> bool: ... + +class RustNotify: + """ + Interface to the Rust [notify](https://crates.io/crates/notify) crate which does + the heavy lifting of watching for file changes and grouping them into events. 
+ """ + + def __init__(self, watch_paths: List[str], debug: bool, force_polling: bool, poll_delay_ms: int) -> None: + """ + Create a new `RustNotify` instance and start a thread to watch for changes. + + `FileNotFoundError` is raised if any of the paths do not exist. + + Args: + watch_paths: file system paths to watch for changes, can be directories or files + debug: if true, print details about all events to stderr + force_polling: if true, always use polling instead of file system notifications + poll_delay_ms: delay between polling for changes, only used if `force_polling=True` + """ + def watch( + self, + debounce_ms: int, + step_ms: int, + timeout_ms: int, + stop_event: Optional[AbstractEvent], + ) -> Union[Set[Tuple[int, str]], Literal['signal', 'stop', 'timeout']]: + """ + Watch for changes. + + This method will wait `timeout_ms` milliseconds for changes, but once a change is detected, + it will group changes and return in no more than `debounce_ms` milliseconds. + + The GIL is released during a `step_ms` sleep on each iteration to avoid + blocking python. + + Args: + debounce_ms: maximum time in milliseconds to group changes over before returning. + step_ms: time to wait for new changes in milliseconds, if no changes are detected + in this time, and at least one change has been detected, the changes are yielded. + timeout_ms: maximum time in milliseconds to wait for changes before returning, + `0` means wait indefinitely, `debounce_ms` takes precedence over `timeout_ms` once + a change is detected. + stop_event: event to check on every iteration to see if this function should return early. + The event should be an object which has an `is_set()` method which returns a boolean. + + Returns: + See below. + + Return values have the following meanings: + + * Change details as a `set` of `(event_type, path)` tuples, the event types are ints which match + [`Change`][watchfiles.Change], `path` is a string representing the path of the file that changed + * `'signal'` string, if a signal was received + * `'stop'` string, if the `stop_event` was set + * `'timeout'` string, if `timeout_ms` was exceeded + """ + def __enter__(self) -> 'RustNotify': + """ + Does nothing, but allows `RustNotify` to be used as a context manager. + + !!! note + + The watching thead is created when an instance is initiated, not on `__enter__`. + """ + def __exit__(self, *args: Any) -> None: + """ + Calls [`close`][watchfiles._rust_notify.RustNotify.close]. + """ + def close(self) -> None: + """ + Stops the watching thread. After `close` is called, the `RustNotify` instance can no + longer be used, calls to [`watch`][watchfiles._rust_notify.RustNotify.watch] will raise a `RuntimeError`. + + !!! note + + `close` is not required, just deleting the `RustNotify` instance will kill the thread + implicitly. + + As per [#163](https://github.com/samuelcolvin/watchfiles/issues/163) `close()` is only required because + in the event of an error, the traceback in `sys.exc_info` keeps a reference to `watchfiles.watch`'s + frame, so you can't rely on the `RustNotify` object being deleted, and thereby stopping + the watching thread. + """ + +class WatchfilesRustInternalError(RuntimeError): + """ + Raised when RustNotify encounters an unknown error. + + If you get this a lot, please check [github](https://github.com/samuelcolvin/watchfiles/issues) issues + and create a new issue if your problem is not discussed. 
+ """ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/cli.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/cli.py new file mode 100644 index 00000000..d96dcf4f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/cli.py @@ -0,0 +1,206 @@ +import argparse +import logging +import os +import shlex +import sys +from pathlib import Path +from textwrap import dedent +from typing import Any, Callable, List, Optional, Tuple, Union, cast + +from . import Change +from .filters import BaseFilter, DefaultFilter, PythonFilter +from .run import detect_target_type, import_string, run_process +from .version import VERSION + +logger = logging.getLogger('watchfiles.cli') + + +def resolve_path(path_str: str) -> Path: + path = Path(path_str) + if not path.exists(): + raise FileNotFoundError(path) + else: + return path.resolve() + + +def cli(*args_: str) -> None: + """ + Watch one or more directories and execute either a shell command or a python function on file changes. + + Example of watching the current directory and calling a python function: + + watchfiles foobar.main + + Example of watching python files in two local directories and calling a shell command: + + watchfiles --filter python 'pytest --lf' src tests + + See https://watchfiles.helpmanual.io/cli/ for more information. + """ + args = args_ or sys.argv[1:] + parser = argparse.ArgumentParser( + prog='watchfiles', + description=dedent((cli.__doc__ or '').strip('\n')), + formatter_class=argparse.RawTextHelpFormatter, + ) + parser.add_argument('target', help='Command or dotted function path to run') + parser.add_argument( + 'paths', nargs='*', default='.', help='Filesystem paths to watch, defaults to current directory' + ) + + parser.add_argument( + '--ignore-paths', + nargs='?', + type=str, + help=( + 'Specify directories to ignore, ' + 'to ignore multiple paths use a comma as separator, e.g. 
"env" or "env,node_modules"' + ), + ) + parser.add_argument( + '--target-type', + nargs='?', + type=str, + default='auto', + choices=['command', 'function', 'auto'], + help=( + 'Whether the target should be intercepted as a shell command or a python function, ' + 'defaults to "auto" which infers the target type from the target string' + ), + ) + parser.add_argument( + '--filter', + nargs='?', + type=str, + default='default', + help=( + 'Which files to watch, defaults to "default" which uses the "DefaultFilter", ' + '"python" uses the "PythonFilter", "all" uses no filter, ' + 'any other value is interpreted as a python function/class path which is imported' + ), + ) + parser.add_argument( + '--args', + nargs='?', + type=str, + help='Arguments to set on sys.argv before calling target function, used only if the target is a function', + ) + parser.add_argument('--verbose', action='store_true', help='Set log level to "debug", wins over `--verbosity`') + parser.add_argument( + '--verbosity', + nargs='?', + type=str, + default='info', + choices=['warning', 'info', 'debug'], + help='Log level, defaults to "info"', + ) + parser.add_argument( + '--sigint-timeout', + nargs='?', + type=int, + default=5, + help='How long to wait for the sigint timeout before sending sigkill.', + ) + parser.add_argument( + '--sigkill-timeout', + nargs='?', + type=int, + default=1, + help='How long to wait for the sigkill timeout before issuing a timeout exception.', + ) + parser.add_argument('--version', '-V', action='version', version=f'%(prog)s v{VERSION}') + arg_namespace = parser.parse_args(args) + + if arg_namespace.verbose: + log_level = logging.DEBUG + else: + log_level = getattr(logging, arg_namespace.verbosity.upper()) + + hdlr = logging.StreamHandler() + hdlr.setLevel(log_level) + hdlr.setFormatter(logging.Formatter(fmt='[%(asctime)s] %(message)s', datefmt='%H:%M:%S')) + wg_logger = logging.getLogger('watchfiles') + wg_logger.addHandler(hdlr) + wg_logger.setLevel(log_level) + + if arg_namespace.target_type == 'auto': + target_type = detect_target_type(arg_namespace.target) + else: + target_type = arg_namespace.target_type + + if target_type == 'function': + logger.debug('target_type=function, attempting import of "%s"', arg_namespace.target) + import_exit(arg_namespace.target) + if arg_namespace.args: + sys.argv = [arg_namespace.target] + shlex.split(arg_namespace.args) + elif arg_namespace.args: + logger.warning('--args is only used when the target is a function') + + try: + paths = [resolve_path(p) for p in arg_namespace.paths] + except FileNotFoundError as e: + print(f'path "{e}" does not exist', file=sys.stderr) + sys.exit(1) + + watch_filter, watch_filter_str = build_filter(arg_namespace.filter, arg_namespace.ignore_paths) + + logger.info( + 'watchfiles v%s 👀 path=%s target="%s" (%s) filter=%s...', + VERSION, + ', '.join(f'"{p}"' for p in paths), + arg_namespace.target, + target_type, + watch_filter_str, + ) + + run_process( + *paths, + target=arg_namespace.target, + target_type=target_type, + watch_filter=watch_filter, + debug=log_level == logging.DEBUG, + sigint_timeout=arg_namespace.sigint_timeout, + sigkill_timeout=arg_namespace.sigkill_timeout, + ) + + +def import_exit(function_path: str) -> Any: + cwd = os.getcwd() + if cwd not in sys.path: + sys.path.append(cwd) + + try: + return import_string(function_path) + except ImportError as e: + print(f'ImportError: {e}', file=sys.stderr) + sys.exit(1) + + +def build_filter( + filter_name: str, ignore_paths_str: Optional[str] +) -> Tuple[Union[None, 
DefaultFilter, Callable[[Change, str], bool]], str]: + ignore_paths: List[Path] = [] + if ignore_paths_str: + ignore_paths = [Path(p).resolve() for p in ignore_paths_str.split(',')] + + if filter_name == 'default': + return DefaultFilter(ignore_paths=ignore_paths), 'DefaultFilter' + elif filter_name == 'python': + return PythonFilter(ignore_paths=ignore_paths), 'PythonFilter' + elif filter_name == 'all': + if ignore_paths: + logger.warning('"--ignore-paths" argument ignored as "all" filter was selected') + return None, '(no filter)' + + watch_filter_cls = import_exit(filter_name) + if isinstance(watch_filter_cls, type) and issubclass(watch_filter_cls, DefaultFilter): + return watch_filter_cls(ignore_paths=ignore_paths), watch_filter_cls.__name__ + + if ignore_paths: + logger.warning('"--ignore-paths" argument ignored as filter is not a subclass of DefaultFilter') + + if isinstance(watch_filter_cls, type) and issubclass(watch_filter_cls, BaseFilter): + return watch_filter_cls(), watch_filter_cls.__name__ + else: + watch_filter = cast(Callable[[Change, str], bool], watch_filter_cls) + return watch_filter, repr(watch_filter_cls) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/filters.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/filters.py new file mode 100644 index 00000000..a55bf223 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/filters.py @@ -0,0 +1,147 @@ +import logging +import os +import re +from pathlib import Path +from typing import TYPE_CHECKING, Optional, Sequence, Union + +__all__ = 'BaseFilter', 'DefaultFilter', 'PythonFilter' +logger = logging.getLogger('watchfiles.watcher') + + +if TYPE_CHECKING: + from .main import Change + + +class BaseFilter: + """ + Useful base class for creating filters. `BaseFilter` should be inherited and configured, rather than used + directly. + + The class supports ignoring files in 3 ways: + """ + + __slots__ = '_ignore_dirs', '_ignore_entity_regexes', '_ignore_paths' + ignore_dirs: Sequence[str] = () + """Full names of directories to ignore, an obvious example would be `.git`.""" + ignore_entity_patterns: Sequence[str] = () + """ + Patterns of files or directories to ignore, these are compiled into regexes. + + "entity" here refers to the specific file or directory - basically the result of `path.split(os.sep)[-1]`, + an obvious example would be `r'\\.py[cod]$'`. + """ + ignore_paths: Sequence[Union[str, Path]] = () + """ + Full paths to ignore, e.g. `/home/users/.cache` or `C:\\Users\\user\\.cache`. + """ + + def __init__(self) -> None: + self._ignore_dirs = set(self.ignore_dirs) + self._ignore_entity_regexes = tuple(re.compile(r) for r in self.ignore_entity_patterns) + self._ignore_paths = tuple(map(str, self.ignore_paths)) + + def __call__(self, change: 'Change', path: str) -> bool: + """ + Instances of `BaseFilter` subclasses can be used as callables. + Args: + change: The type of change that occurred, see [`Change`][watchfiles.Change]. + path: the raw path of the file or directory that changed. + + Returns: + True if the file should be included in changes, False if it should be ignored. 
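Because `__init__` compiles these three class attributes, a subclass usually only needs to override them. A small illustrative filter (all values hypothetical):

```py
from watchfiles import BaseFilter, watch

class DocsFilter(BaseFilter):
    # skip a build directory, temp files, and one absolute cache path
    ignore_dirs = ('_build',)
    ignore_entity_patterns = (r'\.tmp$',)
    ignore_paths = ('/tmp/docs-cache',)

for changes in watch('./docs', watch_filter=DocsFilter()):
    print(changes)
```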
+ """ + parts = path.lstrip(os.sep).split(os.sep) + if any(p in self._ignore_dirs for p in parts): + return False + + entity_name = parts[-1] + if any(r.search(entity_name) for r in self._ignore_entity_regexes): + return False + elif self._ignore_paths and path.startswith(self._ignore_paths): + return False + else: + return True + + def __repr__(self) -> str: + args = ', '.join(f'{k}={getattr(self, k, None)!r}' for k in self.__slots__) + return f'{self.__class__.__name__}({args})' + + +class DefaultFilter(BaseFilter): + """ + The default filter, which ignores files and directories that you might commonly want to ignore. + """ + + ignore_dirs: Sequence[str] = ( + '__pycache__', + '.git', + '.hg', + '.svn', + '.tox', + '.venv', + 'site-packages', + '.idea', + 'node_modules', + ) + """Directory names to ignore.""" + + ignore_entity_patterns: Sequence[str] = ( + r'\.py[cod]$', + r'\.___jb_...___$', + r'\.sw.$', + '~$', + r'^\.\#', + r'^\.DS_Store$', + r'^flycheck_', + ) + """File/Directory name patterns to ignore.""" + + def __init__( + self, + *, + ignore_dirs: Optional[Sequence[str]] = None, + ignore_entity_patterns: Optional[Sequence[str]] = None, + ignore_paths: Optional[Sequence[Union[str, Path]]] = None, + ) -> None: + """ + Args: + ignore_dirs: if not `None`, overrides the `ignore_dirs` value set on the class. + ignore_entity_patterns: if not `None`, overrides the `ignore_entity_patterns` value set on the class. + ignore_paths: if not `None`, overrides the `ignore_paths` value set on the class. + """ + if ignore_dirs is not None: + self.ignore_dirs = ignore_dirs + if ignore_entity_patterns is not None: + self.ignore_entity_patterns = ignore_entity_patterns + if ignore_paths is not None: + self.ignore_paths = ignore_paths + + super().__init__() + + +class PythonFilter(DefaultFilter): + """ + A filter for Python files, since this class inherits from [`DefaultFilter`][watchfiles.DefaultFilter] + it will ignore files and directories that you might commonly want to ignore as well as filtering out + all changes except in Python files (files with extensions `('.py', '.pyx', '.pyd')`). + """ + + def __init__( + self, + *, + ignore_paths: Optional[Sequence[Union[str, Path]]] = None, + extra_extensions: Sequence[str] = (), + ) -> None: + """ + Args: + ignore_paths: The paths to ignore, see [`BaseFilter`][watchfiles.BaseFilter]. + extra_extensions: extra extensions to ignore. + + `ignore_paths` and `extra_extensions` can be passed as arguments partly to support [CLI](../cli.md) usage where + `--ignore-paths` and `--extensions` can be passed as arguments. 
+ """ + self.extensions = ('.py', '.pyx', '.pyd') + tuple(extra_extensions) + super().__init__(ignore_paths=ignore_paths) + + def __call__(self, change: 'Change', path: str) -> bool: + return path.endswith(self.extensions) and super().__call__(change, path) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/main.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/main.py new file mode 100644 index 00000000..7af2796a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/main.py @@ -0,0 +1,293 @@ +import logging +import os +import sys +import warnings +from enum import IntEnum +from pathlib import Path +from typing import TYPE_CHECKING, AsyncGenerator, Callable, Generator, Optional, Set, Tuple, Union + +import anyio + +from ._rust_notify import RustNotify +from .filters import DefaultFilter + +__all__ = 'watch', 'awatch', 'Change', 'FileChange' +logger = logging.getLogger('watchfiles.main') + + +class Change(IntEnum): + """ + Enum representing the type of change that occurred. + """ + + added = 1 + """A new file or directory was added.""" + modified = 2 + """A file or directory was modified, can be either a metadata or data change.""" + deleted = 3 + """A file or directory was deleted.""" + + def raw_str(self) -> str: + if self == Change.added: + return 'added' + elif self == Change.modified: + return 'modified' + else: + return 'deleted' + + +FileChange = Tuple[Change, str] +""" +A tuple representing a file change, first element is a [`Change`][watchfiles.Change] member, second is the path +of the file or directory that changed. +""" + +if TYPE_CHECKING: + import asyncio + from typing import Protocol + + import trio + + AnyEvent = Union[anyio.Event, asyncio.Event, trio.Event] + + class AbstractEvent(Protocol): + def is_set(self) -> bool: + ... + + +def watch( + *paths: Union[Path, str], + watch_filter: Optional[Callable[['Change', str], bool]] = DefaultFilter(), + debounce: int = 1_600, + step: int = 50, + stop_event: Optional['AbstractEvent'] = None, + rust_timeout: int = 5_000, + yield_on_timeout: bool = False, + debug: bool = False, + raise_interrupt: bool = True, + force_polling: Optional[bool] = None, + poll_delay_ms: int = 30, +) -> Generator[Set[FileChange], None, None]: + """ + Watch one or more paths and yield a set of changes whenever files change. + + The paths watched can be directories or files, directories are watched recursively - changes in subdirectories + are also detected. + + Args: + *paths: filesystem paths to watch. + watch_filter: callable used to filter out changes which are not important, you can either use a raw callable + or a [`BaseFilter`][watchfiles.BaseFilter] instance, + defaults to an instance of [`DefaultFilter`][watchfiles.DefaultFilter]. To keep all changes, use `None`. + debounce: maximum time in milliseconds to group changes over before yielding them. + step: time to wait for new changes in milliseconds, if no changes are detected in this time, and + at least one change has been detected, the changes are yielded. + stop_event: event to stop watching, if this is set, the generator will stop iteration, + this can be anything with an `is_set()` method which returns a bool, e.g. `threading.Event()`. + rust_timeout: maximum time in milliseconds to wait in the rust code for changes, `0` means no timeout. + yield_on_timeout: if `True`, the generator will yield upon timeout in rust even if no changes are detected. 
+ debug: whether to print information about all filesystem changes in rust to stdout. + raise_interrupt: whether to re-raise `KeyboardInterrupt`s, or suppress the error and just stop iterating. + force_polling: if `True`, always use polling instead of file system notifications, default is `None` where + `force_polling` is set to `True` if the `WATCHFILES_FORCE_POLLING` environment variable exists. + poll_delay_ms: delay between polling for changes, only used if `force_polling=True`. + + Yields: + The generator yields sets of [`FileChange`][watchfiles.main.FileChange]s. + + ```py title="Example of watch usage" + from watchfiles import watch + + for changes in watch('./first/dir', './second/dir', raise_interrupt=False): + print(changes) + ``` + """ + force_polling = _default_force_pulling(force_polling) + with RustNotify([str(p) for p in paths], debug, force_polling, poll_delay_ms) as watcher: + while True: + raw_changes = watcher.watch(debounce, step, rust_timeout, stop_event) + if raw_changes == 'timeout': + if yield_on_timeout: + yield set() + else: + logger.debug('rust notify timeout, continuing') + elif raw_changes == 'signal': + if raise_interrupt: + raise KeyboardInterrupt + else: + logger.warning('KeyboardInterrupt caught, stopping watch') + return + elif raw_changes == 'stop': + return + else: + changes = _prep_changes(raw_changes, watch_filter) + if changes: + _log_changes(changes) + yield changes + + +async def awatch( # noqa C901 + *paths: Union[Path, str], + watch_filter: Optional[Callable[[Change, str], bool]] = DefaultFilter(), + debounce: int = 1_600, + step: int = 50, + stop_event: Optional['AnyEvent'] = None, + rust_timeout: Optional[int] = None, + yield_on_timeout: bool = False, + debug: bool = False, + raise_interrupt: Optional[bool] = None, + force_polling: Optional[bool] = None, + poll_delay_ms: int = 30, +) -> AsyncGenerator[Set[FileChange], None]: + """ + Asynchronous equivalent of [`watch`][watchfiles.watch] using threads to wait for changes. + Arguments match those of [`watch`][watchfiles.watch] except `stop_event`. + + All async methods use [anyio](https://anyio.readthedocs.io/en/latest/) to run the event loop. + + Unlike [`watch`][watchfiles.watch] `KeyboardInterrupt` cannot be suppressed by `awatch` so they need to be caught + where `asyncio.run` or equivalent is called. + + Args: + *paths: filesystem paths to watch. + watch_filter: matches the same argument of [`watch`][watchfiles.watch]. + debounce: matches the same argument of [`watch`][watchfiles.watch]. + step: matches the same argument of [`watch`][watchfiles.watch]. + stop_event: `anyio.Event` which can be used to stop iteration, see example below. + rust_timeout: matches the same argument of [`watch`][watchfiles.watch], except that `None` means + use `1_000` on Windows and `5_000` on other platforms thus helping with exiting on `Ctrl+C` on Windows, + see [#110](https://github.com/samuelcolvin/watchfiles/issues/110). + yield_on_timeout: matches the same argument of [`watch`][watchfiles.watch]. + debug: matches the same argument of [`watch`][watchfiles.watch]. + raise_interrupt: This is deprecated, `KeyboardInterrupt` will cause this coroutine to be cancelled and then + be raised by the top level `asyncio.run` call or equivalent, and should be caught there. 
+ See [#136](https://github.com/samuelcolvin/watchfiles/issues/136) + force_polling: if true, always use polling instead of file system notifications, default is `None` where + `force_polling` is set to `True` if the `WATCHFILES_FORCE_POLLING` environment variable exists. + poll_delay_ms: delay between polling for changes, only used if `force_polling=True`. + + Yields: + The generator yields sets of [`FileChange`][watchfiles.main.FileChange]s. + + ```py title="Example of awatch usage" + import asyncio + from watchfiles import awatch + + async def main(): + async for changes in awatch('./first/dir', './second/dir'): + print(changes) + + if __name__ == '__main__': + try: + asyncio.run(main()) + except KeyboardInterrupt: + print('stopped via KeyboardInterrupt') + ``` + + ```py title="Example of awatch usage with a stop event" + import asyncio + from watchfiles import awatch + + async def main(): + stop_event = asyncio.Event() + + async def stop_soon(): + await asyncio.sleep(3) + stop_event.set() + + stop_soon_task = asyncio.create_task(stop_soon()) + + async for changes in awatch('/path/to/dir', stop_event=stop_event): + print(changes) + + # cleanup by awaiting the (now complete) stop_soon_task + await stop_soon_task + + asyncio.run(main()) + ``` + """ + if raise_interrupt is not None: + warnings.warn( + 'raise_interrupt is deprecated, KeyboardInterrupt will cause this coroutine to be cancelled and then ' + 'be raised by the top level asyncio.run call or equivalent, and should be caught there. See #136.', + DeprecationWarning, + ) + + if stop_event is None: + stop_event_: 'AnyEvent' = anyio.Event() + else: + stop_event_ = stop_event + + force_polling = _default_force_pulling(force_polling) + with RustNotify([str(p) for p in paths], debug, force_polling, poll_delay_ms) as watcher: + timeout = _calc_async_timeout(rust_timeout) + CancelledError = anyio.get_cancelled_exc_class() + + while True: + async with anyio.create_task_group() as tg: + try: + raw_changes = await anyio.to_thread.run_sync(watcher.watch, debounce, step, timeout, stop_event_) + except (CancelledError, KeyboardInterrupt): + stop_event_.set() + # suppressing KeyboardInterrupt wouldn't stop it getting raised by the top level asyncio.run call + raise + tg.cancel_scope.cancel() + + if raw_changes == 'timeout': + if yield_on_timeout: + yield set() + else: + logger.debug('rust notify timeout, continuing') + elif raw_changes == 'stop': + return + elif raw_changes == 'signal': + # in theory the watch thread should never get a signal + raise RuntimeError('watch thread unexpectedly received a signal') + else: + changes = _prep_changes(raw_changes, watch_filter) + if changes: + _log_changes(changes) + yield changes + + +def _prep_changes( + raw_changes: Set[Tuple[int, str]], watch_filter: Optional[Callable[[Change, str], bool]] +) -> Set[FileChange]: + # if we wanted to be really snazzy, we could move this into rust + changes = {(Change(change), path) for change, path in raw_changes} + if watch_filter: + changes = {c for c in changes if watch_filter(c[0], c[1])} + return changes + + +def _log_changes(changes: Set[FileChange]) -> None: + if logger.isEnabledFor(logging.INFO): # pragma: no branch + count = len(changes) + plural = '' if count == 1 else 's' + if logger.isEnabledFor(logging.DEBUG): + logger.debug('%d change%s detected: %s', count, plural, changes) + else: + logger.info('%d change%s detected', count, plural) + + +def _calc_async_timeout(timeout: Optional[int]) -> int: + """ + see 
https://github.com/samuelcolvin/watchfiles/issues/110 + """ + if timeout is None: + if sys.platform == 'win32': + return 1_000 + else: + return 5_000 + else: + return timeout + + +def _default_force_pulling(force_polling: Optional[bool]) -> bool: + """ + https://github.com/samuelcolvin/watchfiles/issues/167#issuecomment-1189309354 for rationale. + """ + if force_polling is None: + return 'WATCHFILES_FORCE_POLLING' in os.environ + else: + return force_polling diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/py.typed b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/py.typed new file mode 100644 index 00000000..7cd6d6f0 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. The watchfiles package uses inline types. diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/run.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/run.py new file mode 100644 index 00000000..0d0bfc2c --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/run.py @@ -0,0 +1,384 @@ +import contextlib +import json +import logging +import os +import re +import shlex +import signal +import subprocess +import sys +from importlib import import_module +from multiprocessing import get_context +from multiprocessing.context import SpawnProcess +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, Optional, Set, Tuple, Union + +import anyio + +from .filters import DefaultFilter +from .main import Change, FileChange, awatch, watch + +if TYPE_CHECKING: + try: + from typing import Literal + except ImportError: + from typing_extensions import Literal # type: ignore[misc] + +__all__ = 'run_process', 'arun_process', 'detect_target_type', 'import_string' +logger = logging.getLogger('watchfiles.main') + + +def run_process( + *paths: Union[Path, str], + target: Union[str, Callable[..., Any]], + args: Tuple[Any, ...] = (), + kwargs: Optional[Dict[str, Any]] = None, + target_type: "Literal['function', 'command', 'auto']" = 'auto', + callback: Optional[Callable[[Set[FileChange]], None]] = None, + watch_filter: Optional[Callable[[Change, str], bool]] = DefaultFilter(), + debounce: int = 1_600, + step: int = 50, + debug: bool = False, + sigint_timeout: int = 5, + sigkill_timeout: int = 1, +) -> int: + """ + Run a process and restart it upon file changes. + + `run_process` can work in two ways: + + * Using `multiprocessing.Process` † to run a python function + * Or, using `subprocess.Popen` to run a command + + !!! note + + **†** technically `multiprocessing.get_context('spawn').Process` to avoid forking and improve + code reload/import. + + Internally, `run_process` uses [`watch`][watchfiles.watch] with `raise_interrupt=False` so the function + exits cleanly upon `Ctrl+C`. + + Args: + *paths: matches the same argument of [`watch`][watchfiles.watch] + target: function or command to run + args: arguments to pass to `target`, only used if `target` is a function + kwargs: keyword arguments to pass to `target`, only used if `target` is a function + target_type: type of target. Can be `'function'`, `'command'`, or `'auto'` in which case + [`detect_target_type`][watchfiles.run.detect_target_type] is used to determine the type. 
+ callback: function to call on each reload, the function should accept a set of changes as the sole argument + watch_filter: matches the same argument of [`watch`][watchfiles.watch] + debounce: matches the same argument of [`watch`][watchfiles.watch] + step: matches the same argument of [`watch`][watchfiles.watch] + debug: matches the same argument of [`watch`][watchfiles.watch] + sigint_timeout: the number of seconds to wait after sending sigint before sending sigkill + sigkill_timeout: the number of seconds to wait after sending sigkill before raising an exception + + Returns: + number of times the function was reloaded. + + ```py title="Example of run_process running a function" + from watchfiles import run_process + + def callback(changes): + print('changes detected:', changes) + + def foobar(a, b): + print('foobar called with:', a, b) + + if __name__ == '__main__': + run_process('./path/to/dir', target=foobar, args=(1, 2), callback=callback) + ``` + + As well as using a `callback` function, changes can be accessed from within the target function, + using the `WATCHFILES_CHANGES` environment variable. + + ```py title="Example of run_process accessing changes" + from watchfiles import run_process + + def foobar(a, b, c): + # changes will be an empty list "[]" the first time the function is called + changes = os.getenv('WATCHFILES_CHANGES') + changes = json.loads(changes) + print('foobar called due to changes:', changes) + + if __name__ == '__main__': + run_process('./path/to/dir', target=foobar, args=(1, 2, 3)) + ``` + + Again with the target as `command`, `WATCHFILES_CHANGES` can be used + to access changes. + + ```bash title="example.sh" + echo "changers: ${WATCHFILES_CHANGES}" + ``` + + ```py title="Example of run_process running a command" + from watchfiles import run_process + + if __name__ == '__main__': + run_process('.', target='./example.sh') + ``` + """ + if target_type == 'auto': + target_type = detect_target_type(target) + + logger.debug('running "%s" as %s', target, target_type) + process = start_process(target, target_type, args, kwargs) + reloads = 0 + + try: + for changes in watch( + *paths, watch_filter=watch_filter, debounce=debounce, step=step, debug=debug, raise_interrupt=False + ): + callback and callback(changes) + process.stop(sigint_timeout=sigint_timeout, sigkill_timeout=sigkill_timeout) + process = start_process(target, target_type, args, kwargs, changes) + reloads += 1 + finally: + process.stop() + return reloads + + +async def arun_process( + *paths: Union[Path, str], + target: Union[str, Callable[..., Any]], + args: Tuple[Any, ...] = (), + kwargs: Optional[Dict[str, Any]] = None, + target_type: "Literal['function', 'command', 'auto']" = 'auto', + callback: Optional[Callable[[Set[FileChange]], Any]] = None, + watch_filter: Optional[Callable[[Change, str], bool]] = DefaultFilter(), + debounce: int = 1_600, + step: int = 50, + debug: bool = False, +) -> int: + """ + Async equivalent of [`run_process`][watchfiles.run_process], all arguments match those of `run_process` except + `callback` which can be a coroutine. + + Starting and stopping the process and watching for changes is done in a separate thread. + + As with `run_process`, internally `arun_process` uses [`awatch`][watchfiles.awatch], however `KeyboardInterrupt` + cannot be caught and suppressed in `awatch` so these errors need to be caught separately, see below. 
+ + ```py title="Example of arun_process usage" + import asyncio + from watchfiles import arun_process + + async def callback(changes): + await asyncio.sleep(0.1) + print('changes detected:', changes) + + def foobar(a, b): + print('foobar called with:', a, b) + + async def main(): + await arun_process('.', target=foobar, args=(1, 2), callback=callback) + + if __name__ == '__main__': + try: + asyncio.run(main()) + except KeyboardInterrupt: + print('stopped via KeyboardInterrupt') + ``` + """ + import inspect + + if target_type == 'auto': + target_type = detect_target_type(target) + + logger.debug('running "%s" as %s', target, target_type) + process = await anyio.to_thread.run_sync(start_process, target, target_type, args, kwargs) + reloads = 0 + + async for changes in awatch(*paths, watch_filter=watch_filter, debounce=debounce, step=step, debug=debug): + if callback is not None: + r = callback(changes) + if inspect.isawaitable(r): + await r + + await anyio.to_thread.run_sync(process.stop) + process = await anyio.to_thread.run_sync(start_process, target, target_type, args, kwargs, changes) + reloads += 1 + await anyio.to_thread.run_sync(process.stop) + return reloads + + +# Use spawn context to make sure code run in subprocess +# does not reuse imported modules in main process/context +spawn_context = get_context('spawn') + + +def start_process( + target: Union[str, Callable[..., Any]], + target_type: "Literal['function', 'command']", + args: Tuple[Any, ...], + kwargs: Optional[Dict[str, Any]], + changes: Optional[Set[FileChange]] = None, +) -> 'CombinedProcess': + if changes is None: + changes_env_var = '[]' + else: + changes_env_var = json.dumps([[c.raw_str(), p] for c, p in changes]) + + os.environ['WATCHFILES_CHANGES'] = changes_env_var + + process: 'Union[SpawnProcess, subprocess.Popen[bytes]]' + if target_type == 'function': + kwargs = kwargs or {} + if isinstance(target, str): + args = target, get_tty_path(), args, kwargs + target_ = run_function + kwargs = {} + else: + target_ = target + + process = spawn_context.Process(target=target_, args=args, kwargs=kwargs) + process.start() + else: + if args or kwargs: + logger.warning('ignoring args and kwargs for "command" target') + + assert isinstance(target, str), 'target must be a string to run as a command' + popen_args = shlex.split(target) + process = subprocess.Popen(popen_args) + return CombinedProcess(process) + + +def detect_target_type(target: Union[str, Callable[..., Any]]) -> "Literal['function', 'command']": + """ + Used by [`run_process`][watchfiles.run_process], [`arun_process`][watchfiles.arun_process] + and indirectly the CLI to determine the target type with `target_type` is `auto`. + + Detects the target type - either `function` or `command`. This method is only called with `target_type='auto'`. + + The following logic is employed: + + * If `target` is not a string, it is assumed to be a function + * If `target` ends with `.py` or `.sh`, it is assumed to be a command + * Otherwise, the target is assumed to be a function if it matches the regex `[a-zA-Z0-9_]+(\\.[a-zA-Z0-9_]+)+` + + If this logic does not work for you, specify the target type explicitly using the `target_type` function argument + or `--target-type` command line argument. 
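Under those rules the detection plays out as in this quick sketch (the target strings are arbitrary examples):

```py
from watchfiles.run import detect_target_type

print(detect_target_type('app.main.run'))  # 'function': matches the dotted-path regex
print(detect_target_type('./example.sh'))  # 'command': the .py/.sh suffix check wins first
print(detect_target_type('pytest --lf'))   # 'command': the fallback for everything else
```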
+ + Args: + target: The target value + + Returns: + either `'function'` or `'command'` + """ + if not isinstance(target, str): + return 'function' + elif target.endswith(('.py', '.sh')): + return 'command' + elif re.fullmatch(r'[a-zA-Z0-9_]+(\.[a-zA-Z0-9_]+)+', target): + return 'function' + else: + return 'command' + + +class CombinedProcess: + def __init__(self, p: 'Union[SpawnProcess, subprocess.Popen[bytes]]'): + self._p = p + assert self.pid is not None, 'process not yet spawned' + + def stop(self, sigint_timeout: int = 5, sigkill_timeout: int = 1) -> None: + os.environ.pop('WATCHFILES_CHANGES', None) + if self.is_alive(): + logger.debug('stopping process...') + + os.kill(self.pid, signal.SIGINT) + + try: + self.join(sigint_timeout) + except subprocess.TimeoutExpired: + # Capture this exception to allow the self.exitcode to be reached. + # This will allow the SIGKILL to be sent, otherwise it is swallowed up. + logger.warning('SIGINT timed out after %r seconds', sigint_timeout) + pass + + if self.exitcode is None: + logger.warning('process has not terminated, sending SIGKILL') + os.kill(self.pid, signal.SIGKILL) + self.join(sigkill_timeout) + else: + logger.debug('process stopped') + else: + logger.warning('process already dead, exit code: %d', self.exitcode) + + def is_alive(self) -> bool: + if isinstance(self._p, SpawnProcess): + return self._p.is_alive() + else: + return self._p.poll() is None + + @property + def pid(self) -> int: + # we check the process has always been spawned when CombinedProcess is initialised + return self._p.pid # type: ignore[return-value] + + def join(self, timeout: int) -> None: + if isinstance(self._p, SpawnProcess): + self._p.join(timeout) + else: + self._p.wait(timeout) + + @property + def exitcode(self) -> Optional[int]: + if isinstance(self._p, SpawnProcess): + return self._p.exitcode + else: + return self._p.returncode + + +def run_function(function: str, tty_path: Optional[str], args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> None: + with set_tty(tty_path): + func = import_string(function) + func(*args, **kwargs) + + +def import_string(dotted_path: str) -> Any: + """ + Stolen approximately from django. Import a dotted module path and return the attribute/class designated by the + last name in the path. Raise ImportError if the import fails. + """ + try: + module_path, class_name = dotted_path.strip(' ').rsplit('.', 1) + except ValueError as e: + raise ImportError(f'"{dotted_path}" doesn\'t look like a module path') from e + + module = import_module(module_path) + try: + return getattr(module, class_name) + except AttributeError as e: + raise ImportError(f'Module "{module_path}" does not define a "{class_name}" attribute') from e + + +def get_tty_path() -> Optional[str]: # pragma: no cover + """ + Return the path to the current TTY, if any. + + Virtually impossible to test in pytest, hence no cover. + """ + try: + return os.ttyname(sys.stdin.fileno()) + except OSError: + # fileno() always fails with pytest + return '/dev/tty' + except AttributeError: + # on Windows. No idea of a better solution + return None + + +@contextlib.contextmanager +def set_tty(tty_path: Optional[str]) -> Generator[None, None, None]: + if tty_path: + try: + with open(tty_path) as tty: # pragma: no cover + sys.stdin = tty + yield + except OSError: + # eg. 
"No such device or address: '/dev/tty'", see https://github.com/samuelcolvin/watchfiles/issues/40 + yield + else: + # currently on windows tty_path is None and there's nothing we can do here + yield diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/version.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/version.py new file mode 100644 index 00000000..f55721f1 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/watchfiles/version.py @@ -0,0 +1,5 @@ +from ._rust_notify import __version__ + +__all__ = ('VERSION',) + +VERSION = __version__ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/INSTALLER b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/LICENSE b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/LICENSE new file mode 100644 index 00000000..119b29ef --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/LICENSE @@ -0,0 +1,25 @@ +Copyright (c) 2013-2021 Aymeric Augustin and contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of websockets nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/METADATA b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/METADATA new file mode 100644 index 00000000..3b042a3f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/METADATA @@ -0,0 +1,174 @@ +Metadata-Version: 2.1 +Name: websockets +Version: 10.3 +Summary: An implementation of the WebSocket Protocol (RFC 6455 & 7692) +Home-page: https://github.com/aaugustin/websockets +Author: Aymeric Augustin +Author-email: aymeric.augustin@m4x.org +License: BSD +Project-URL: Changelog, https://websockets.readthedocs.io/en/stable/project/changelog.html +Project-URL: Documentation, https://websockets.readthedocs.io/ +Project-URL: Funding, https://tidelift.com/subscription/pkg/pypi-websockets?utm_source=pypi-websockets&utm_medium=referral&utm_campaign=readme +Project-URL: Tracker, https://github.com/aaugustin/websockets/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Python: >=3.7 +License-File: LICENSE + +.. image:: logo/horizontal.svg + :width: 480px + :alt: websockets + +|licence| |version| |pyversions| |wheel| |tests| |docs| + +.. |licence| image:: https://img.shields.io/pypi/l/websockets.svg + :target: https://pypi.python.org/pypi/websockets + +.. |version| image:: https://img.shields.io/pypi/v/websockets.svg + :target: https://pypi.python.org/pypi/websockets + +.. |pyversions| image:: https://img.shields.io/pypi/pyversions/websockets.svg + :target: https://pypi.python.org/pypi/websockets + +.. |wheel| image:: https://img.shields.io/pypi/wheel/websockets.svg + :target: https://pypi.python.org/pypi/websockets + +.. |tests| image:: https://img.shields.io/github/checks-status/aaugustin/websockets/main + :target: https://github.com/aaugustin/websockets/actions/workflows/tests.yml + +.. |docs| image:: https://img.shields.io/readthedocs/websockets.svg + :target: https://websockets.readthedocs.io/ + +What is ``websockets``? +----------------------- + +websockets is a library for building WebSocket_ servers and clients in Python +with a focus on correctness, simplicity, robustness, and performance. + +.. _WebSocket: https://developer.mozilla.org/en-US/docs/Web/API/WebSockets_API + +Built on top of ``asyncio``, Python's standard asynchronous I/O framework, it +provides an elegant coroutine-based API. + +`Documentation is available on Read the Docs. `_ + +Here's how a client sends and receives messages: + +.. copy-pasted because GitHub doesn't support the include directive + +.. code:: python + + #!/usr/bin/env python + + import asyncio + from websockets import connect + + async def hello(uri): + async with connect(uri) as websocket: + await websocket.send("Hello world!") + await websocket.recv() + + asyncio.run(hello("ws://localhost:8765")) + +And here's an echo server: + +.. 
code:: python + + #!/usr/bin/env python + + import asyncio + from websockets import serve + + async def echo(websocket): + async for message in websocket: + await websocket.send(message) + + async def main(): + async with serve(echo, "localhost", 8765): + await asyncio.Future() # run forever + + asyncio.run(main()) + +Does that look good? + +`Get started with the tutorial! `_ + +Why should I use ``websockets``? +-------------------------------- + +The development of ``websockets`` is shaped by four principles: + +1. **Correctness**: ``websockets`` is heavily tested for compliance + with :rfc:`6455`. Continuous integration fails under 100% branch + coverage. + +2. **Simplicity**: all you need to understand is ``msg = await ws.recv()`` and + ``await ws.send(msg)``. ``websockets`` takes care of managing connections + so you can focus on your application. + +3. **Robustness**: ``websockets`` is built for production. For example, it was + the only library to `handle backpressure correctly`_ before the issue + became widely known in the Python community. + +4. **Performance**: memory usage is optimized and configurable. A C extension + accelerates expensive operations. It's pre-compiled for Linux, macOS and + Windows and packaged in the wheel format for each system and Python version. + +Documentation is a first class concern in the project. Head over to `Read the +Docs`_ and see for yourself. + +.. _Read the Docs: https://websockets.readthedocs.io/ +.. _handle backpressure correctly: https://vorpus.org/blog/some-thoughts-on-asynchronous-api-design-in-a-post-asyncawait-world/#websocket-servers + +Why shouldn't I use ``websockets``? +----------------------------------- + +* If you prefer callbacks over coroutines: ``websockets`` was created to + provide the best coroutine-based API to manage WebSocket connections in + Python. Pick another library for a callback-based API. + +* If you're looking for a mixed HTTP / WebSocket library: ``websockets`` aims + at being an excellent implementation of :rfc:`6455`: The WebSocket Protocol + and :rfc:`7692`: Compression Extensions for WebSocket. Its support for HTTP + is minimal — just enough for a HTTP health check. + + If you want to do both in the same server, look at HTTP frameworks that + build on top of ``websockets`` to support WebSocket connections, like + Sanic_. + +.. _Sanic: https://sanicframework.org/en/ + +What else? +---------- + +Bug reports, patches and suggestions are welcome! + +To report a security vulnerability, please use the `Tidelift security +contact`_. Tidelift will coordinate the fix and disclosure. + +.. _Tidelift security contact: https://tidelift.com/security + +For anything else, please open an issue_ or send a `pull request`_. + +.. _issue: https://github.com/aaugustin/websockets/issues/new +.. _pull request: https://github.com/aaugustin/websockets/compare/ + +Participants must uphold the `Contributor Covenant code of conduct`_. + +.. _Contributor Covenant code of conduct: https://github.com/aaugustin/websockets/blob/main/CODE_OF_CONDUCT.md + +``websockets`` is released under the `BSD license`_. + +.. 
_BSD license: https://github.com/aaugustin/websockets/blob/main/LICENSE + + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/RECORD b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/RECORD new file mode 100644 index 00000000..c959a8a3 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/RECORD @@ -0,0 +1,68 @@ +websockets-10.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +websockets-10.3.dist-info/LICENSE,sha256=kG_hZrUJEDUNhf1QXczOB8McRhHHTRKtrzWz42bfX1A,1561 +websockets-10.3.dist-info/METADATA,sha256=WmVHPSlLRfuF4mgoL8iR6ZcWzt28qG077rtpGHsF6XU,6309 +websockets-10.3.dist-info/RECORD,, +websockets-10.3.dist-info/WHEEL,sha256=OmDKD3JmZjhMu_YdpkiJDfFL1rf8n9nexhWCGhSBGVE,101 +websockets-10.3.dist-info/top_level.txt,sha256=AmmQQqhFwjoCEKzNajXY5f28yxLP_cgVflJES82E_cs,51 +websockets/__init__.py,sha256=JjTVh9iIoob-Gjyt6gYG9AGGUACdwgRmplR4OUzuyf8,3550 +websockets/__main__.py,sha256=-7FCp_ucYi9s3JgRZ5ba23-SwTVu49Lnm1L4GKcEBcQ,7485 +websockets/__pycache__/__init__.cpython-37.pyc,, +websockets/__pycache__/__main__.cpython-37.pyc,, +websockets/__pycache__/auth.cpython-37.pyc,, +websockets/__pycache__/client.cpython-37.pyc,, +websockets/__pycache__/connection.cpython-37.pyc,, +websockets/__pycache__/datastructures.cpython-37.pyc,, +websockets/__pycache__/exceptions.cpython-37.pyc,, +websockets/__pycache__/frames.cpython-37.pyc,, +websockets/__pycache__/headers.cpython-37.pyc,, +websockets/__pycache__/http.cpython-37.pyc,, +websockets/__pycache__/http11.cpython-37.pyc,, +websockets/__pycache__/imports.cpython-37.pyc,, +websockets/__pycache__/server.cpython-37.pyc,, +websockets/__pycache__/streams.cpython-37.pyc,, +websockets/__pycache__/typing.cpython-37.pyc,, +websockets/__pycache__/uri.cpython-37.pyc,, +websockets/__pycache__/utils.cpython-37.pyc,, +websockets/__pycache__/version.cpython-37.pyc,, +websockets/auth.py,sha256=U8oQCEDDnPantIlLEYyjW3X6EtPLH3Vmw_OyapuXaQM,151 +websockets/client.py,sha256=lyPsC7I9ASv6FlFcnAMWmrRwRxWA122q1VwfAqSCL2Y,12305 +websockets/connection.py,sha256=-rhs_Mq75xfPDEJkk_vegtze9dkuqZNwBofOrya8spQ,24367 +websockets/datastructures.py,sha256=b-J8Hq-Ori5zXkrdFpV6iXh2kGFrf8m4PHKp_49zmXc,5938 +websockets/exceptions.py,sha256=lwKzfdZLmr2gRzu3P_vpQLAh6LKcPqKLg7UjxW99c3c,10447 +websockets/extensions/__init__.py,sha256=HdQaQhOVkCR5RJVRKXG5Xmb6cMpAhLaKgRikYRzO64E,102 +websockets/extensions/__pycache__/__init__.cpython-37.pyc,, +websockets/extensions/__pycache__/base.cpython-37.pyc,, +websockets/extensions/__pycache__/permessage_deflate.cpython-37.pyc,, +websockets/extensions/base.py,sha256=vbbMikoNFH8sQpsED2NYXuY_2t1CqE1waUvZQY5ZuQY,3229 +websockets/extensions/permessage_deflate.py,sha256=iuzOzLQMuzjGEAvODk9dyfYtxw7qnW6xtqgnDsCS8kM,25444 +websockets/frames.py,sha256=SNOMWoPR1e-9IYoVeFNt-II307dysVXQS1FJCF8oAXA,12824 +websockets/headers.py,sha256=ZSdzxVAcJ_O4fEg-PK3v7uuMWoOOR936TOdmt2ceg4U,16707 +websockets/http.py,sha256=yGB2GWFxQk52lRmzLWC5ks2U6Eej39n6rIYF-nff7jw,674 +websockets/http11.py,sha256=ChjoAc3A1frS8I6Ra4Jk_yt_LO7cM53z_bRmVxxmd-g,13056 +websockets/imports.py,sha256=9BnKfdMY-4RW_uRv_DkOhDs1i6ZhrWIp245QWZagajc,2889 +websockets/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +websockets/legacy/__pycache__/__init__.cpython-37.pyc,, +websockets/legacy/__pycache__/auth.cpython-37.pyc,, +websockets/legacy/__pycache__/client.cpython-37.pyc,, 
+websockets/legacy/__pycache__/compatibility.cpython-37.pyc,, +websockets/legacy/__pycache__/framing.cpython-37.pyc,, +websockets/legacy/__pycache__/handshake.cpython-37.pyc,, +websockets/legacy/__pycache__/http.cpython-37.pyc,, +websockets/legacy/__pycache__/protocol.cpython-37.pyc,, +websockets/legacy/__pycache__/server.cpython-37.pyc,, +websockets/legacy/auth.py,sha256=Elv_O4vxQevBakNwakbgO4o_XIPgJdVnyAU9tMPyhgU,6665 +websockets/legacy/client.py,sha256=_EKhKMoozOnN9Mh6s8x4FsBBadK8vGarhY3qwatFKy0,26718 +websockets/legacy/compatibility.py,sha256=kFh6P2833WNrCi2VrlvTRSvI5Lb1sKziLodfXY2ZX2c,327 +websockets/legacy/framing.py,sha256=caDXTke4l0wzoGtFsXSdGbBjkmR_yocC71NbkPgl1RM,5195 +websockets/legacy/handshake.py,sha256=hsMPP45ibzSkc0tQd0hKJ0HYk0TJqdq7ATyjXqTDs_g,5641 +websockets/legacy/http.py,sha256=kXKCpXKCjISBgrXWjJqTThlhEjZlPIQgCV330GoaYCM,7131 +websockets/legacy/protocol.py,sha256=-FkJB9BG_SQMB415O2fyxvV3auCWUfon2glUZ3C47-E,63434 +websockets/legacy/server.py,sha256=UK_F7K_hhaCdOGb7cdczJv1rjmqHbEOrMOl3hIAagTY,44234 +websockets/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +websockets/server.py,sha256=1h2tlOrbH7DG86qUGitGmx_HhWPL0NXeR_jHBYDq0rs,19110 +websockets/speedups.cp37-win_amd64.pyd,sha256=fwaos1nrC9tZ-HmNfq-JyOb40vgLa_H7yR2ClLggjf8,11264 +websockets/streams.py,sha256=PbVlfaUjsRo70fO6zOnvae-9M3Bnh2PUu43f27WHknc,4189 +websockets/typing.py,sha256=YNw2qGqZgZTMB4kqfxincSX0xfZMT_QglvZFUXwDucs,1444 +websockets/uri.py,sha256=iG2uPx953hEm4JM1NZLrULtCcAzb6TCUe0sx3O_gecQ,3309 +websockets/utils.py,sha256=nz3tb3CXB6xtssMM2fUFDXPqHPq1XeNRFog9tAzJpPk,1201 +websockets/version.py,sha256=buwxuegDY32aazHDKEl6Fu5pNHgJKKbjmFRuPB-qXt8,2799 diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/WHEEL b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/WHEEL new file mode 100644 index 00000000..54091f94 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: false +Tag: cp37-cp37m-win_amd64 + diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/top_level.txt b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/top_level.txt new file mode 100644 index 00000000..5474af74 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets-10.3.dist-info/top_level.txt @@ -0,0 +1,3 @@ +websockets +websockets/extensions +websockets/legacy diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/__init__.py new file mode 100644 index 00000000..ec348412 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/__init__.py @@ -0,0 +1,114 @@ +from __future__ import annotations + +from .imports import lazy_import +from .version import version as __version__ # noqa + + +__all__ = [ # noqa + "AbortHandshake", + "basic_auth_protocol_factory", + "BasicAuthWebSocketServerProtocol", + "broadcast", + "ClientConnection", + "connect", + "ConnectionClosed", + "ConnectionClosedError", + "ConnectionClosedOK", + "Data", + "DuplicateParameter", + "ExtensionName", + "ExtensionParameter", + "InvalidHandshake", + "InvalidHeader", + "InvalidHeaderFormat", + "InvalidHeaderValue", + "InvalidMessage", + "InvalidOrigin", + "InvalidParameterName", + "InvalidParameterValue", + 
"InvalidState", + "InvalidStatus", + "InvalidStatusCode", + "InvalidUpgrade", + "InvalidURI", + "LoggerLike", + "NegotiationError", + "Origin", + "parse_uri", + "PayloadTooBig", + "ProtocolError", + "RedirectHandshake", + "SecurityError", + "serve", + "ServerConnection", + "Subprotocol", + "unix_connect", + "unix_serve", + "WebSocketClientProtocol", + "WebSocketCommonProtocol", + "WebSocketException", + "WebSocketProtocolError", + "WebSocketServer", + "WebSocketServerProtocol", + "WebSocketURI", +] + +lazy_import( + globals(), + aliases={ + "auth": ".legacy", + "basic_auth_protocol_factory": ".legacy.auth", + "BasicAuthWebSocketServerProtocol": ".legacy.auth", + "broadcast": ".legacy.protocol", + "ClientConnection": ".client", + "connect": ".legacy.client", + "unix_connect": ".legacy.client", + "WebSocketClientProtocol": ".legacy.client", + "Headers": ".datastructures", + "MultipleValuesError": ".datastructures", + "WebSocketException": ".exceptions", + "ConnectionClosed": ".exceptions", + "ConnectionClosedError": ".exceptions", + "ConnectionClosedOK": ".exceptions", + "InvalidHandshake": ".exceptions", + "SecurityError": ".exceptions", + "InvalidMessage": ".exceptions", + "InvalidHeader": ".exceptions", + "InvalidHeaderFormat": ".exceptions", + "InvalidHeaderValue": ".exceptions", + "InvalidOrigin": ".exceptions", + "InvalidUpgrade": ".exceptions", + "InvalidStatus": ".exceptions", + "InvalidStatusCode": ".exceptions", + "NegotiationError": ".exceptions", + "DuplicateParameter": ".exceptions", + "InvalidParameterName": ".exceptions", + "InvalidParameterValue": ".exceptions", + "AbortHandshake": ".exceptions", + "RedirectHandshake": ".exceptions", + "InvalidState": ".exceptions", + "InvalidURI": ".exceptions", + "PayloadTooBig": ".exceptions", + "ProtocolError": ".exceptions", + "WebSocketProtocolError": ".exceptions", + "protocol": ".legacy", + "WebSocketCommonProtocol": ".legacy.protocol", + "ServerConnection": ".server", + "serve": ".legacy.server", + "unix_serve": ".legacy.server", + "WebSocketServerProtocol": ".legacy.server", + "WebSocketServer": ".legacy.server", + "Data": ".typing", + "LoggerLike": ".typing", + "Origin": ".typing", + "ExtensionHeader": ".typing", + "ExtensionParameter": ".typing", + "Subprotocol": ".typing", + }, + deprecated_aliases={ + "framing": ".legacy", + "handshake": ".legacy", + "parse_uri": ".uri", + "WebSocketURI": ".uri", + }, +) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/__main__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/__main__.py new file mode 100644 index 00000000..c562d21b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/__main__.py @@ -0,0 +1,230 @@ +from __future__ import annotations + +import argparse +import asyncio +import os +import signal +import sys +import threading +from typing import Any, Set + +from .exceptions import ConnectionClosed +from .frames import Close +from .legacy.client import connect +from .version import version as websockets_version + + +if sys.platform == "win32": + + def win_enable_vt100() -> None: + """ + Enable VT-100 for console output on Windows. + + See also https://bugs.python.org/issue29059. 
+ + """ + import ctypes + + STD_OUTPUT_HANDLE = ctypes.c_uint(-11) + INVALID_HANDLE_VALUE = ctypes.c_uint(-1) + ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x004 + + handle = ctypes.windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE) + if handle == INVALID_HANDLE_VALUE: + raise RuntimeError("unable to obtain stdout handle") + + cur_mode = ctypes.c_uint() + if ctypes.windll.kernel32.GetConsoleMode(handle, ctypes.byref(cur_mode)) == 0: + raise RuntimeError("unable to query current console mode") + + # ctypes ints lack support for the required bit-OR operation. + # Temporarily convert to Py int, do the OR and convert back. + py_int_mode = int.from_bytes(cur_mode, sys.byteorder) + new_mode = ctypes.c_uint(py_int_mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING) + + if ctypes.windll.kernel32.SetConsoleMode(handle, new_mode) == 0: + raise RuntimeError("unable to set console mode") + + +def exit_from_event_loop_thread( + loop: asyncio.AbstractEventLoop, + stop: asyncio.Future[None], +) -> None: + loop.stop() + if not stop.done(): + # When exiting the thread that runs the event loop, raise + # KeyboardInterrupt in the main thread to exit the program. + if sys.platform == "win32": + ctrl_c = signal.CTRL_C_EVENT + else: + ctrl_c = signal.SIGINT + os.kill(os.getpid(), ctrl_c) + + +def print_during_input(string: str) -> None: + sys.stdout.write( + # Save cursor position + "\N{ESC}7" + # Add a new line + "\N{LINE FEED}" + # Move cursor up + "\N{ESC}[A" + # Insert blank line, scroll last line down + "\N{ESC}[L" + # Print string in the inserted blank line + f"{string}\N{LINE FEED}" + # Restore cursor position + "\N{ESC}8" + # Move cursor down + "\N{ESC}[B" + ) + sys.stdout.flush() + + +def print_over_input(string: str) -> None: + sys.stdout.write( + # Move cursor to beginning of line + "\N{CARRIAGE RETURN}" + # Delete current line + "\N{ESC}[K" + # Print string + f"{string}\N{LINE FEED}" + ) + sys.stdout.flush() + + +async def run_client( + uri: str, + loop: asyncio.AbstractEventLoop, + inputs: asyncio.Queue[str], + stop: asyncio.Future[None], +) -> None: + try: + websocket = await connect(uri) + except Exception as exc: + print_over_input(f"Failed to connect to {uri}: {exc}.") + exit_from_event_loop_thread(loop, stop) + return + else: + print_during_input(f"Connected to {uri}.") + + try: + while True: + incoming: asyncio.Future[Any] = asyncio.create_task(websocket.recv()) + outgoing: asyncio.Future[Any] = asyncio.create_task(inputs.get()) + done: Set[asyncio.Future[Any]] + pending: Set[asyncio.Future[Any]] + done, pending = await asyncio.wait( + [incoming, outgoing, stop], return_when=asyncio.FIRST_COMPLETED + ) + + # Cancel pending tasks to avoid leaking them. + if incoming in pending: + incoming.cancel() + if outgoing in pending: + outgoing.cancel() + + if incoming in done: + try: + message = incoming.result() + except ConnectionClosed: + break + else: + if isinstance(message, str): + print_during_input("< " + message) + else: + print_during_input("< (binary) " + message.hex()) + + if outgoing in done: + message = outgoing.result() + await websocket.send(message) + + if stop in done: + break + + finally: + await websocket.close() + assert websocket.close_code is not None and websocket.close_reason is not None + close_status = Close(websocket.close_code, websocket.close_reason) + + print_over_input(f"Connection closed: {close_status}.") + + exit_from_event_loop_thread(loop, stop) + + +def main() -> None: + # Parse command line arguments. 
+ parser = argparse.ArgumentParser( + prog="python -m websockets", + description="Interactive WebSocket client.", + add_help=False, + ) + group = parser.add_mutually_exclusive_group() + group.add_argument("--version", action="store_true") + group.add_argument("uri", metavar="", nargs="?") + args = parser.parse_args() + + if args.version: + print(f"websockets {websockets_version}") + return + + if args.uri is None: + parser.error("the following arguments are required: ") + + # If we're on Windows, enable VT100 terminal support. + if sys.platform == "win32": + try: + win_enable_vt100() + except RuntimeError as exc: + sys.stderr.write( + f"Unable to set terminal to VT100 mode. This is only " + f"supported since Win10 anniversary update. Expect " + f"weird symbols on the terminal.\nError: {exc}\n" + ) + sys.stderr.flush() + + try: + import readline # noqa + except ImportError: # Windows has no `readline` normally + pass + + # Create an event loop that will run in a background thread. + loop = asyncio.new_event_loop() + + # Due to zealous removal of the loop parameter in the Queue constructor, + # we need a factory coroutine to run in the freshly created event loop. + async def queue_factory() -> asyncio.Queue[str]: + return asyncio.Queue() + + # Create a queue of user inputs. There's no need to limit its size. + inputs: asyncio.Queue[str] = loop.run_until_complete(queue_factory()) + + # Create a stop condition when receiving SIGINT or SIGTERM. + stop: asyncio.Future[None] = loop.create_future() + + # Schedule the task that will manage the connection. + loop.create_task(run_client(args.uri, loop, inputs, stop)) + + # Start the event loop in a background thread. + thread = threading.Thread(target=loop.run_forever) + thread.start() + + # Read from stdin in the main thread in order to receive signals. + try: + while True: + # Since there's no size limit, put_nowait is identical to put. + message = input("> ") + loop.call_soon_threadsafe(inputs.put_nowait, message) + except (KeyboardInterrupt, EOFError): # ^C, ^D + loop.call_soon_threadsafe(stop.set_result, None) + + # Wait for the event loop to terminate. + thread.join() + + # For reasons unclear, even though the loop is closed in the thread, + # it still thinks it's running here. + loop.close() + + +if __name__ == "__main__": + main() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/auth.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/auth.py new file mode 100644 index 00000000..afcb38cf --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/auth.py @@ -0,0 +1,4 @@ +from __future__ import annotations + +# See #940 for why lazy_import isn't used here for backwards compatibility. 
+from .legacy.auth import * # noqa diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/client.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/client.py new file mode 100644 index 00000000..df8e5342 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/client.py @@ -0,0 +1,347 @@ +from __future__ import annotations + +from typing import Generator, List, Optional, Sequence + +from .connection import CLIENT, CONNECTING, OPEN, Connection, State +from .datastructures import Headers, MultipleValuesError +from .exceptions import ( + InvalidHandshake, + InvalidHeader, + InvalidHeaderValue, + InvalidStatus, + InvalidUpgrade, + NegotiationError, +) +from .extensions import ClientExtensionFactory, Extension +from .headers import ( + build_authorization_basic, + build_extension, + build_host, + build_subprotocol, + parse_connection, + parse_extension, + parse_subprotocol, + parse_upgrade, +) +from .http import USER_AGENT +from .http11 import Request, Response +from .typing import ( + ConnectionOption, + ExtensionHeader, + LoggerLike, + Origin, + Subprotocol, + UpgradeProtocol, +) +from .uri import WebSocketURI +from .utils import accept_key, generate_key + + +# See #940 for why lazy_import isn't used here for backwards compatibility. +from .legacy.client import * # isort:skip # noqa + + +__all__ = ["ClientConnection"] + + +class ClientConnection(Connection): + """ + Sans-I/O implementation of a WebSocket client connection. + + Args: + wsuri: URI of the WebSocket server, parsed + with :func:`~websockets.uri.parse_uri`. + origin: value of the ``Origin`` header. This is useful when connecting + to a server that validates the ``Origin`` header to defend against + Cross-Site WebSocket Hijacking attacks. + extensions: list of supported extensions, in order in which they + should be tried. + subprotocols: list of supported subprotocols, in order of decreasing + preference. + state: initial state of the WebSocket connection. + max_size: maximum size of incoming messages in bytes; + :obj:`None` to disable the limit. + logger: logger for this connection; + defaults to ``logging.getLogger("websockets.client")``; + see the :doc:`logging guide <../topics/logging>` for details. + + """ + + def __init__( + self, + wsuri: WebSocketURI, + origin: Optional[Origin] = None, + extensions: Optional[Sequence[ClientExtensionFactory]] = None, + subprotocols: Optional[Sequence[Subprotocol]] = None, + state: State = CONNECTING, + max_size: Optional[int] = 2**20, + logger: Optional[LoggerLike] = None, + ): + super().__init__( + side=CLIENT, + state=state, + max_size=max_size, + logger=logger, + ) + self.wsuri = wsuri + self.origin = origin + self.available_extensions = extensions + self.available_subprotocols = subprotocols + self.key = generate_key() + + def connect(self) -> Request: # noqa: F811 + """ + Create a handshake request to open a connection. + + You must send the handshake request with :meth:`send_request`. + + You can modify it before sending it, for example to add HTTP headers. + + Returns: + Request: WebSocket handshake request event to send to the server. 
+ + """ + headers = Headers() + + headers["Host"] = build_host( + self.wsuri.host, self.wsuri.port, self.wsuri.secure + ) + + if self.wsuri.user_info: + headers["Authorization"] = build_authorization_basic(*self.wsuri.user_info) + + if self.origin is not None: + headers["Origin"] = self.origin + + headers["Upgrade"] = "websocket" + headers["Connection"] = "Upgrade" + headers["Sec-WebSocket-Key"] = self.key + headers["Sec-WebSocket-Version"] = "13" + + if self.available_extensions is not None: + extensions_header = build_extension( + [ + (extension_factory.name, extension_factory.get_request_params()) + for extension_factory in self.available_extensions + ] + ) + headers["Sec-WebSocket-Extensions"] = extensions_header + + if self.available_subprotocols is not None: + protocol_header = build_subprotocol(self.available_subprotocols) + headers["Sec-WebSocket-Protocol"] = protocol_header + + headers["User-Agent"] = USER_AGENT + + return Request(self.wsuri.resource_name, headers) + + def process_response(self, response: Response) -> None: + """ + Check a handshake response. + + Args: + request: WebSocket handshake response received from the server. + + Raises: + InvalidHandshake: if the handshake response is invalid. + + """ + + if response.status_code != 101: + raise InvalidStatus(response) + + headers = response.headers + + connection: List[ConnectionOption] = sum( + [parse_connection(value) for value in headers.get_all("Connection")], [] + ) + + if not any(value.lower() == "upgrade" for value in connection): + raise InvalidUpgrade( + "Connection", ", ".join(connection) if connection else None + ) + + upgrade: List[UpgradeProtocol] = sum( + [parse_upgrade(value) for value in headers.get_all("Upgrade")], [] + ) + + # For compatibility with non-strict implementations, ignore case when + # checking the Upgrade header. It's supposed to be 'WebSocket'. + if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"): + raise InvalidUpgrade("Upgrade", ", ".join(upgrade) if upgrade else None) + + try: + s_w_accept = headers["Sec-WebSocket-Accept"] + except KeyError as exc: + raise InvalidHeader("Sec-WebSocket-Accept") from exc + except MultipleValuesError as exc: + raise InvalidHeader( + "Sec-WebSocket-Accept", + "more than one Sec-WebSocket-Accept header found", + ) from exc + + if s_w_accept != accept_key(self.key): + raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept) + + self.extensions = self.process_extensions(headers) + + self.subprotocol = self.process_subprotocol(headers) + + def process_extensions(self, headers: Headers) -> List[Extension]: + """ + Handle the Sec-WebSocket-Extensions HTTP response header. + + Check that each extension is supported, as well as its parameters. + + :rfc:`6455` leaves the rules up to the specification of each + extension. + + To provide this level of flexibility, for each extension accepted by + the server, we check for a match with each extension available in the + client configuration. If no match is found, an exception is raised. + + If several variants of the same extension are accepted by the server, + it may be configured several times, which won't make sense in general. + Extensions must implement their own requirements. For this purpose, + the list of previously accepted extensions is provided. + + Other requirements, for example related to mandatory extensions or the + order of extensions, may be implemented by overriding this method. + + Args: + headers: WebSocket handshake response headers. 
+ + Returns: + List[Extension]: List of accepted extensions. + + Raises: + InvalidHandshake: to abort the handshake. + + """ + accepted_extensions: List[Extension] = [] + + extensions = headers.get_all("Sec-WebSocket-Extensions") + + if extensions: + + if self.available_extensions is None: + raise InvalidHandshake("no extensions supported") + + parsed_extensions: List[ExtensionHeader] = sum( + [parse_extension(header_value) for header_value in extensions], [] + ) + + for name, response_params in parsed_extensions: + + for extension_factory in self.available_extensions: + + # Skip non-matching extensions based on their name. + if extension_factory.name != name: + continue + + # Skip non-matching extensions based on their params. + try: + extension = extension_factory.process_response_params( + response_params, accepted_extensions + ) + except NegotiationError: + continue + + # Add matching extension to the final list. + accepted_extensions.append(extension) + + # Break out of the loop once we have a match. + break + + # If we didn't break from the loop, no extension in our list + # matched what the server sent. Fail the connection. + else: + raise NegotiationError( + f"Unsupported extension: " + f"name = {name}, params = {response_params}" + ) + + return accepted_extensions + + def process_subprotocol(self, headers: Headers) -> Optional[Subprotocol]: + """ + Handle the Sec-WebSocket-Protocol HTTP response header. + + If provided, check that it contains exactly one supported subprotocol. + + Args: + headers: WebSocket handshake response headers. + + Returns: + Optional[Subprotocol]: Subprotocol, if one was selected. + + """ + subprotocol: Optional[Subprotocol] = None + + subprotocols = headers.get_all("Sec-WebSocket-Protocol") + + if subprotocols: + + if self.available_subprotocols is None: + raise InvalidHandshake("no subprotocols supported") + + parsed_subprotocols: Sequence[Subprotocol] = sum( + [parse_subprotocol(header_value) for header_value in subprotocols], [] + ) + + if len(parsed_subprotocols) > 1: + subprotocols_display = ", ".join(parsed_subprotocols) + raise InvalidHandshake(f"multiple subprotocols: {subprotocols_display}") + + subprotocol = parsed_subprotocols[0] + + if subprotocol not in self.available_subprotocols: + raise NegotiationError(f"unsupported subprotocol: {subprotocol}") + + return subprotocol + + def send_request(self, request: Request) -> None: + """ + Send a handshake request to the server. + + Args: + request: WebSocket handshake request event. 
+ + """ + if self.debug: + self.logger.debug("> GET %s HTTP/1.1", request.path) + for key, value in request.headers.raw_items(): + self.logger.debug("> %s: %s", key, value) + + self.writes.append(request.serialize()) + + def parse(self) -> Generator[None, None, None]: + if self.state is CONNECTING: + response = yield from Response.parse( + self.reader.read_line, + self.reader.read_exact, + self.reader.read_to_eof, + ) + + if self.debug: + code, phrase = response.status_code, response.reason_phrase + self.logger.debug("< HTTP/1.1 %d %s", code, phrase) + for key, value in response.headers.raw_items(): + self.logger.debug("< %s: %s", key, value) + if response.body is not None: + self.logger.debug("< [body] (%d bytes)", len(response.body)) + + try: + self.process_response(response) + except InvalidHandshake as exc: + response._exception = exc + self.handshake_exc = exc + self.parser = self.discard() + next(self.parser) # start coroutine + else: + assert self.state is CONNECTING + self.state = OPEN + finally: + self.events.append(response) + + yield from super().parse() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/connection.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/connection.py new file mode 100644 index 00000000..db8b5369 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/connection.py @@ -0,0 +1,702 @@ +from __future__ import annotations + +import enum +import logging +import uuid +from typing import Generator, List, Optional, Type, Union + +from .exceptions import ( + ConnectionClosed, + ConnectionClosedError, + ConnectionClosedOK, + InvalidState, + PayloadTooBig, + ProtocolError, +) +from .extensions import Extension +from .frames import ( + OK_CLOSE_CODES, + OP_BINARY, + OP_CLOSE, + OP_CONT, + OP_PING, + OP_PONG, + OP_TEXT, + Close, + Frame, +) +from .http11 import Request, Response +from .streams import StreamReader +from .typing import LoggerLike, Origin, Subprotocol + + +__all__ = [ + "Connection", + "Side", + "State", + "SEND_EOF", +] + +Event = Union[Request, Response, Frame] +"""Events that :meth:`~Connection.events_received` may return.""" + + +class Side(enum.IntEnum): + """A WebSocket connection is either a server or a client.""" + + SERVER, CLIENT = range(2) + + +SERVER = Side.SERVER +CLIENT = Side.CLIENT + + +class State(enum.IntEnum): + """A WebSocket connection is in one of these four states.""" + + CONNECTING, OPEN, CLOSING, CLOSED = range(4) + + +CONNECTING = State.CONNECTING +OPEN = State.OPEN +CLOSING = State.CLOSING +CLOSED = State.CLOSED + + +SEND_EOF = b"" +"""Sentinel signaling that the TCP connection must be half-closed.""" + + +class Connection: + """ + Sans-I/O implementation of a WebSocket connection. + + Args: + side: :attr:`~Side.CLIENT` or :attr:`~Side.SERVER`. + state: initial state of the WebSocket connection. + max_size: maximum size of incoming messages in bytes; + :obj:`None` to disable the limit. + logger: logger for this connection; depending on ``side``, + defaults to ``logging.getLogger("websockets.client")`` + or ``logging.getLogger("websockets.server")``; + see the :doc:`logging guide <../topics/logging>` for details. + + """ + + def __init__( + self, + side: Side, + state: State = OPEN, + max_size: Optional[int] = 2**20, + logger: Optional[LoggerLike] = None, + ) -> None: + # Unique identifier. For logs. + self.id: uuid.UUID = uuid.uuid4() + """Unique identifier of the connection. 
Useful in logs."""
+
+        # Logger or LoggerAdapter for this connection.
+        if logger is None:
+            logger = logging.getLogger(f"websockets.{side.name.lower()}")
+        self.logger: LoggerLike = logger
+        """Logger for this connection."""
+
+        # Track if DEBUG is enabled. Shortcut logging calls if it isn't.
+        self.debug = logger.isEnabledFor(logging.DEBUG)
+
+        # Connection side. CLIENT or SERVER.
+        self.side = side
+
+        # Connection state. Initially OPEN because subclasses handle CONNECTING.
+        self.state = state
+
+        # Maximum size of incoming messages in bytes.
+        self.max_size = max_size
+
+        # Current size of incoming message in bytes. Only set while reading a
+        # fragmented message, i.e. a data frame with the FIN bit not set.
+        self.cur_size: Optional[int] = None
+
+        # True while sending a fragmented message, i.e. a data frame with the
+        # FIN bit not set.
+        self.expect_continuation_frame = False
+
+        # WebSocket protocol parameters.
+        self.origin: Optional[Origin] = None
+        self.extensions: List[Extension] = []
+        self.subprotocol: Optional[Subprotocol] = None
+
+        # Close code and reason, set when a close frame is sent or received.
+        self.close_rcvd: Optional[Close] = None
+        self.close_sent: Optional[Close] = None
+        self.close_rcvd_then_sent: Optional[bool] = None
+
+        # Track if an exception happened during the handshake.
+        self.handshake_exc: Optional[Exception] = None
+        """
+        Exception to raise if the opening handshake failed.
+
+        :obj:`None` if the opening handshake succeeded.
+
+        """
+
+        # Track if send_eof() was called.
+        self.eof_sent = False
+
+        # Parser state.
+        self.reader = StreamReader()
+        self.events: List[Event] = []
+        self.writes: List[bytes] = []
+        self.parser = self.parse()
+        next(self.parser)  # start coroutine
+        self.parser_exc: Optional[Exception] = None
+
+    @property
+    def state(self) -> State:
+        """
+        WebSocket connection state.
+
+        Defined in 4.1, 4.2, 7.1.3, and 7.1.4 of :rfc:`6455`.
+
+        """
+        return self._state
+
+    @state.setter
+    def state(self, state: State) -> None:
+        if self.debug:
+            self.logger.debug("= connection is %s", state.name)
+        self._state = state
+
+    @property
+    def close_code(self) -> Optional[int]:
+        """
+        `WebSocket close code`_.
+
+        .. _WebSocket close code:
+            https://www.rfc-editor.org/rfc/rfc6455.html#section-7.1.5
+
+        :obj:`None` if the connection isn't closed yet.
+
+        """
+        if self.state is not CLOSED:
+            return None
+        elif self.close_rcvd is None:
+            return 1006
+        else:
+            return self.close_rcvd.code
+
+    @property
+    def close_reason(self) -> Optional[str]:
+        """
+        `WebSocket close reason`_.
+
+        .. _WebSocket close reason:
+            https://www.rfc-editor.org/rfc/rfc6455.html#section-7.1.6
+
+        :obj:`None` if the connection isn't closed yet.
+
+        """
+        if self.state is not CLOSED:
+            return None
+        elif self.close_rcvd is None:
+            return ""
+        else:
+            return self.close_rcvd.reason
+
+    @property
+    def close_exc(self) -> ConnectionClosed:
+        """
+        Exception to raise when trying to interact with a closed connection.
+
+        Don't raise this exception while the connection :attr:`state`
+        is :attr:`~websockets.connection.State.CLOSING`; wait until
+        it's :attr:`~websockets.connection.State.CLOSED`.
+
+        Indeed, the exception includes the close code and reason, which are
+        known only once the connection is closed.
+
+        Raises:
+            AssertionError: if the connection isn't closed yet.
+ + """ + assert self.state is CLOSED, "connection isn't closed yet" + exc_type: Type[ConnectionClosed] + if ( + self.close_rcvd is not None + and self.close_sent is not None + and self.close_rcvd.code in OK_CLOSE_CODES + and self.close_sent.code in OK_CLOSE_CODES + ): + exc_type = ConnectionClosedOK + else: + exc_type = ConnectionClosedError + exc: ConnectionClosed = exc_type( + self.close_rcvd, + self.close_sent, + self.close_rcvd_then_sent, + ) + # Chain to the exception raised in the parser, if any. + exc.__cause__ = self.parser_exc + return exc + + # Public methods for receiving data. + + def receive_data(self, data: bytes) -> None: + """ + Receive data from the network. + + After calling this method: + + - You must call :meth:`data_to_send` and send this data to the network. + - You should call :meth:`events_received` and process resulting events. + + Raises: + EOFError: if :meth:`receive_eof` was called earlier. + + """ + self.reader.feed_data(data) + next(self.parser) + + def receive_eof(self) -> None: + """ + Receive the end of the data stream from the network. + + After calling this method: + + - You must call :meth:`data_to_send` and send this data to the network. + - You aren't expected to call :meth:`events_received`; it won't return + any new events. + + Raises: + EOFError: if :meth:`receive_eof` was called earlier. + + """ + self.reader.feed_eof() + next(self.parser) + + # Public methods for sending events. + + def send_continuation(self, data: bytes, fin: bool) -> None: + """ + Send a `Continuation frame`_. + + .. _Continuation frame: + https://datatracker.ietf.org/doc/html/rfc6455#section-5.6 + + Parameters: + data: payload containing the same kind of data + as the initial frame. + fin: FIN bit; set it to :obj:`True` if this is the last frame + of a fragmented message and to :obj:`False` otherwise. + + Raises: + ProtocolError: if a fragmented message isn't in progress. + + """ + if not self.expect_continuation_frame: + raise ProtocolError("unexpected continuation frame") + self.expect_continuation_frame = not fin + self.send_frame(Frame(OP_CONT, data, fin)) + + def send_text(self, data: bytes, fin: bool = True) -> None: + """ + Send a `Text frame`_. + + .. _Text frame: + https://datatracker.ietf.org/doc/html/rfc6455#section-5.6 + + Parameters: + data: payload containing text encoded with UTF-8. + fin: FIN bit; set it to :obj:`False` if this is the first frame of + a fragmented message. + + Raises: + ProtocolError: if a fragmented message is in progress. + + """ + if self.expect_continuation_frame: + raise ProtocolError("expected a continuation frame") + self.expect_continuation_frame = not fin + self.send_frame(Frame(OP_TEXT, data, fin)) + + def send_binary(self, data: bytes, fin: bool = True) -> None: + """ + Send a `Binary frame`_. + + .. _Binary frame: + https://datatracker.ietf.org/doc/html/rfc6455#section-5.6 + + Parameters: + data: payload containing arbitrary binary data. + fin: FIN bit; set it to :obj:`False` if this is the first frame of + a fragmented message. + + Raises: + ProtocolError: if a fragmented message is in progress. + + """ + if self.expect_continuation_frame: + raise ProtocolError("expected a continuation frame") + self.expect_continuation_frame = not fin + self.send_frame(Frame(OP_BINARY, data, fin)) + + def send_close(self, code: Optional[int] = None, reason: str = "") -> None: + """ + Send a `Close frame`_. + + .. _Close frame: + https://datatracker.ietf.org/doc/html/rfc6455#section-5.5.1 + + Parameters: + code: close code. + reason: close reason. 
+ + Raises: + ProtocolError: if a fragmented message is being sent, if the code + isn't valid, or if a reason is provided without a code + + """ + if self.expect_continuation_frame: + raise ProtocolError("expected a continuation frame") + if code is None: + if reason != "": + raise ProtocolError("cannot send a reason without a code") + close = Close(1005, "") + data = b"" + else: + close = Close(code, reason) + data = close.serialize() + # send_frame() guarantees that self.state is OPEN at this point. + # 7.1.3. The WebSocket Closing Handshake is Started + self.send_frame(Frame(OP_CLOSE, data)) + self.close_sent = close + self.state = CLOSING + + def send_ping(self, data: bytes) -> None: + """ + Send a `Ping frame`_. + + .. _Ping frame: + https://datatracker.ietf.org/doc/html/rfc6455#section-5.5.2 + + Parameters: + data: payload containing arbitrary binary data. + + """ + self.send_frame(Frame(OP_PING, data)) + + def send_pong(self, data: bytes) -> None: + """ + Send a `Pong frame`_. + + .. _Pong frame: + https://datatracker.ietf.org/doc/html/rfc6455#section-5.5.3 + + Parameters: + data: payload containing arbitrary binary data. + + """ + self.send_frame(Frame(OP_PONG, data)) + + def fail(self, code: int, reason: str = "") -> None: + """ + `Fail the WebSocket connection`_. + + .. _Fail the WebSocket connection: + https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.7 + + Parameters: + code: close code + reason: close reason + + Raises: + ProtocolError: if the code isn't valid. + """ + # 7.1.7. Fail the WebSocket Connection + + # Send a close frame when the state is OPEN (a close frame was already + # sent if it's CLOSING), except when failing the connection because + # of an error reading from or writing to the network. + if self.state is OPEN: + if code != 1006: + close = Close(code, reason) + data = close.serialize() + self.send_frame(Frame(OP_CLOSE, data)) + self.close_sent = close + self.state = CLOSING + + # When failing the connection, a server closes the TCP connection + # without waiting for the client to complete the handshake, while a + # client waits for the server to close the TCP connection, possibly + # after sending a close frame that the client will ignore. + if self.side is SERVER and not self.eof_sent: + self.send_eof() + + # 7.1.7. Fail the WebSocket Connection "An endpoint MUST NOT continue + # to attempt to process data(including a responding Close frame) from + # the remote endpoint after being instructed to _Fail the WebSocket + # Connection_." + self.parser = self.discard() + next(self.parser) # start coroutine + + # Public method for getting incoming events after receiving data. + + def events_received(self) -> List[Event]: + """ + Fetch events generated from data received from the network. + + Call this method immediately after any of the ``receive_*()`` methods. + + Process resulting events, likely by passing them to the application. + + Returns: + List[Event]: Events read from the connection. + """ + events, self.events = self.events, [] + return events + + # Public method for getting outgoing data after receiving data or sending events. + + def data_to_send(self) -> List[bytes]: + """ + Obtain data to send to the network. + + Call this method immediately after any of the ``receive_*()``, + ``send_*()``, or :meth:`fail` methods. + + Write resulting data to the connection. + + The empty bytestring :data:`~websockets.connection.SEND_EOF` signals + the end of the data stream. When you receive it, half-close the TCP + connection. 
+
+        Returns:
+            List[bytes]: Data to write to the connection.
+
+        """
+        writes, self.writes = self.writes, []
+        return writes
+
+    def close_expected(self) -> bool:
+        """
+        Tell if the TCP connection is expected to close soon.
+
+        Call this method immediately after any of the ``receive_*()`` or
+        :meth:`fail` methods.
+
+        If it returns :obj:`True`, schedule closing the TCP connection after a
+        short timeout if the other side hasn't already closed it.
+
+        Returns:
+            bool: Whether the TCP connection is expected to close soon.
+
+        """
+        # We expect a TCP close if and only if we sent a close frame:
+        # * Normal closure: once we send a close frame, we expect a TCP close:
+        #   server waits for client to complete the TCP closing handshake;
+        #   client waits for server to initiate the TCP closing handshake.
+        # * Abnormal closure: we always send a close frame and the same logic
+        #   applies, except on EOFError where we don't send a close frame
+        #   because we already received the TCP close, so we don't expect it.
+        # We already got a TCP close if and only if the state is CLOSED.
+        return self.state is CLOSING or self.handshake_exc is not None
+
+    # Private methods for receiving data.
+
+    def parse(self) -> Generator[None, None, None]:
+        """
+        Parse incoming data into frames.
+
+        :meth:`receive_data` and :meth:`receive_eof` run this generator
+        coroutine until it needs more data or reaches EOF.
+
+        """
+        try:
+            while True:
+                if (yield from self.reader.at_eof()):
+                    if self.debug:
+                        self.logger.debug("< EOF")
+                    # If the WebSocket connection is closed cleanly, with a
+                    # closing handshake, recv_frame() substitutes parse()
+                    # with discard(). This branch is reached only when the
+                    # connection isn't closed cleanly.
+                    raise EOFError("unexpected end of stream")
+
+                if self.max_size is None:
+                    max_size = None
+                elif self.cur_size is None:
+                    max_size = self.max_size
+                else:
+                    max_size = self.max_size - self.cur_size
+
+                # During a normal closure, execution ends here on the next
+                # iteration of the loop after receiving a close frame. At
+                # this point, recv_frame() replaced parse() by discard().
+                frame = yield from Frame.parse(
+                    self.reader.read_exact,
+                    mask=self.side is SERVER,
+                    max_size=max_size,
+                    extensions=self.extensions,
+                )
+
+                if self.debug:
+                    self.logger.debug("< %s", frame)
+
+                self.recv_frame(frame)
+
+        except ProtocolError as exc:
+            self.fail(1002, str(exc))
+            self.parser_exc = exc
+
+        except EOFError as exc:
+            self.fail(1006, str(exc))
+            self.parser_exc = exc
+
+        except UnicodeDecodeError as exc:
+            self.fail(1007, f"{exc.reason} at position {exc.start}")
+            self.parser_exc = exc
+
+        except PayloadTooBig as exc:
+            self.fail(1009, str(exc))
+            self.parser_exc = exc
+
+        except Exception as exc:
+            self.logger.error("parser failed", exc_info=True)
+            # Don't include exception details, which may be security-sensitive.
+            self.fail(1011)
+            self.parser_exc = exc
+
+        # During an abnormal closure, execution ends here after catching an
+        # exception. At this point, fail() replaced parse() by discard().
+        yield
+        raise AssertionError("parse() shouldn't step after error")  # pragma: no cover
+
+    def discard(self) -> Generator[None, None, None]:
+        """
+        Discard incoming data.
+
+        This coroutine replaces :meth:`parse`:
+
+        - after receiving a close frame, during a normal closure (1.4);
+        - after sending a close frame, during an abnormal closure (7.1.7).
+
+        """
+        # The server closes the TCP connection in the same circumstances where
+        # discard() replaces parse(). The client closes the connection later,
+        # after the server closes the connection or a timeout elapses.
+        # (The latter case cannot be handled in this Sans-I/O layer.)
+        assert (self.side is SERVER) == (self.eof_sent)
+        while not (yield from self.reader.at_eof()):
+            self.reader.discard()
+        if self.debug:
+            self.logger.debug("< EOF")
+        # A server closes the TCP connection immediately, while a client
+        # waits for the server to close the TCP connection.
+        if self.side is CLIENT:
+            self.send_eof()
+        self.state = CLOSED
+        # If discard() completes normally, execution ends here.
+        yield
+        # Once the reader reaches EOF, its feed_data/eof() methods raise an
+        # error, so our receive_data/eof() methods don't step the generator.
+        raise AssertionError("discard() shouldn't step after EOF")  # pragma: no cover
+
+    def recv_frame(self, frame: Frame) -> None:
+        """
+        Process an incoming frame.
+
+        """
+        if frame.opcode is OP_TEXT or frame.opcode is OP_BINARY:
+            if self.cur_size is not None:
+                raise ProtocolError("expected a continuation frame")
+            if frame.fin:
+                self.cur_size = None
+            else:
+                self.cur_size = len(frame.data)
+
+        elif frame.opcode is OP_CONT:
+            if self.cur_size is None:
+                raise ProtocolError("unexpected continuation frame")
+            if frame.fin:
+                self.cur_size = None
+            else:
+                self.cur_size += len(frame.data)
+
+        elif frame.opcode is OP_PING:
+            # 5.5.2. Ping: "Upon receipt of a Ping frame, an endpoint MUST
+            # send a Pong frame in response"
+            pong_frame = Frame(OP_PONG, frame.data)
+            self.send_frame(pong_frame)
+
+        elif frame.opcode is OP_PONG:
+            # 5.5.3 Pong: "A response to an unsolicited Pong frame is not
+            # expected."
+            pass
+
+        elif frame.opcode is OP_CLOSE:
+            # 7.1.5. The WebSocket Connection Close Code
+            # 7.1.6. The WebSocket Connection Close Reason
+            self.close_rcvd = Close.parse(frame.data)
+            if self.state is CLOSING:
+                assert self.close_sent is not None
+                self.close_rcvd_then_sent = False
+
+            if self.cur_size is not None:
+                raise ProtocolError("incomplete fragmented message")
+
+            # 5.5.1 Close: "If an endpoint receives a Close frame and did
+            # not previously send a Close frame, the endpoint MUST send a
+            # Close frame in response. (When sending a Close frame in
+            # response, the endpoint typically echos the status code it
+            # received.)"
+
+            if self.state is OPEN:
+                # Echo the original data instead of re-serializing it with
+                # Close.serialize() because that fails when the close frame
+                # is empty and Close.parse() synthesizes a 1005 close code.
+                # The rest is identical to send_close().
+                self.send_frame(Frame(OP_CLOSE, frame.data))
+                self.close_sent = self.close_rcvd
+                self.close_rcvd_then_sent = True
+                self.state = CLOSING
+
+            # 7.1.2. Start the WebSocket Closing Handshake: "Once an
+            # endpoint has both sent and received a Close control frame,
+            # that endpoint SHOULD _Close the WebSocket Connection_"
+
+            # A server closes the TCP connection immediately, while a client
+            # waits for the server to close the TCP connection.
+            if self.side is SERVER:
+                self.send_eof()
+
+            # 1.4. Closing Handshake: "after receiving a control frame
+            # indicating the connection should be closed, a peer discards
+            # any further data received."
+            self.parser = self.discard()
+            next(self.parser)  # start coroutine
+
+        else:  # pragma: no cover
+            # This can't happen because Frame.parse() validates opcodes.
+            raise AssertionError(f"unexpected opcode: {frame.opcode:02x}")
+
+        self.events.append(frame)
+
+    # Private methods for sending events.
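+
+    # A minimal sketch of how an I/O layer is expected to drive the public
+    # methods above (illustrative only, not part of the upstream file; `sock`
+    # is assumed to be a connected socket and `conn` a Connection instance):
+    #
+    #     data = sock.recv(4096)
+    #     if data:
+    #         conn.receive_data(data)
+    #     else:
+    #         conn.receive_eof()
+    #     for event in conn.events_received():
+    #         ...  # hand the event (Request, Response, or Frame) to the app
+    #     for data in conn.data_to_send():
+    #         if data:
+    #             sock.sendall(data)
+    #         else:  # SEND_EOF sentinel: half-close the TCP connection
+    #             sock.shutdown(socket.SHUT_WR)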
+ + def send_frame(self, frame: Frame) -> None: + if self.state is not OPEN: + raise InvalidState( + f"cannot write to a WebSocket in the {self.state.name} state" + ) + + if self.debug: + self.logger.debug("> %s", frame) + self.writes.append( + frame.serialize(mask=self.side is CLIENT, extensions=self.extensions) + ) + + def send_eof(self) -> None: + assert not self.eof_sent + self.eof_sent = True + if self.debug: + self.logger.debug("> EOF") + self.writes.append(SEND_EOF) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/datastructures.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/datastructures.py new file mode 100644 index 00000000..36a2cbaf --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/datastructures.py @@ -0,0 +1,200 @@ +from __future__ import annotations + +import sys +from typing import ( + Any, + Dict, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + Tuple, + Union, +) + + +if sys.version_info[:2] >= (3, 8): + from typing import Protocol +else: # pragma: no cover + Protocol = object # mypy will report errors on Python 3.7. + + +__all__ = ["Headers", "HeadersLike", "MultipleValuesError"] + + +class MultipleValuesError(LookupError): + """ + Exception raised when :class:`Headers` has more than one value for a key. + + """ + + def __str__(self) -> str: + # Implement the same logic as KeyError_str in Objects/exceptions.c. + if len(self.args) == 1: + return repr(self.args[0]) + return super().__str__() + + +class Headers(MutableMapping[str, str]): + """ + Efficient data structure for manipulating HTTP headers. + + A :class:`list` of ``(name, values)`` is inefficient for lookups. + + A :class:`dict` doesn't suffice because header names are case-insensitive + and multiple occurrences of headers with the same name are possible. + + :class:`Headers` stores HTTP headers in a hybrid data structure to provide + efficient insertions and lookups while preserving the original data. + + In order to account for multiple values with minimal hassle, + :class:`Headers` follows this logic: + + - When getting a header with ``headers[name]``: + - if there's no value, :exc:`KeyError` is raised; + - if there's exactly one value, it's returned; + - if there's more than one value, :exc:`MultipleValuesError` is raised. + + - When setting a header with ``headers[name] = value``, the value is + appended to the list of values for that header. + + - When deleting a header with ``del headers[name]``, all values for that + header are removed (this is slow). + + Other methods for manipulating headers are consistent with this logic. + + As long as no header occurs multiple times, :class:`Headers` behaves like + :class:`dict`, except keys are lower-cased to provide case-insensitivity. + + Two methods support manipulating multiple values explicitly: + + - :meth:`get_all` returns a list of all values for a header; + - :meth:`raw_items` returns an iterator of ``(name, values)`` pairs. + + """ + + __slots__ = ["_dict", "_list"] + + # Like dict, Headers accepts an optional "mapping or iterable" argument. 
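A quick illustration of the behavior documented above, assuming the class is used as described (values accumulate per key, lookups are case-insensitive); editorial sketch, not part of the vendored file::

    h = Headers()
    h["Set-Cookie"] = "a=1"
    h["set-cookie"] = "b=2"
    assert h.get_all("SET-COOKIE") == ["a=1", "b=2"]
    assert list(h.raw_items()) == [("Set-Cookie", "a=1"), ("set-cookie", "b=2")]
    try:
        h["Set-Cookie"]  # two values for one key
    except MultipleValuesError:
        pass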
+ def __init__(self, *args: HeadersLike, **kwargs: str) -> None: + self._dict: Dict[str, List[str]] = {} + self._list: List[Tuple[str, str]] = [] + self.update(*args, **kwargs) + + def __str__(self) -> str: + return "".join(f"{key}: {value}\r\n" for key, value in self._list) + "\r\n" + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self._list!r})" + + def copy(self) -> Headers: + copy = self.__class__() + copy._dict = self._dict.copy() + copy._list = self._list.copy() + return copy + + def serialize(self) -> bytes: + # Since headers only contain ASCII characters, we can keep this simple. + return str(self).encode() + + # Collection methods + + def __contains__(self, key: object) -> bool: + return isinstance(key, str) and key.lower() in self._dict + + def __iter__(self) -> Iterator[str]: + return iter(self._dict) + + def __len__(self) -> int: + return len(self._dict) + + # MutableMapping methods + + def __getitem__(self, key: str) -> str: + value = self._dict[key.lower()] + if len(value) == 1: + return value[0] + else: + raise MultipleValuesError(key) + + def __setitem__(self, key: str, value: str) -> None: + self._dict.setdefault(key.lower(), []).append(value) + self._list.append((key, value)) + + def __delitem__(self, key: str) -> None: + key_lower = key.lower() + self._dict.__delitem__(key_lower) + # This is inefficient. Fortunately deleting HTTP headers is uncommon. + self._list = [(k, v) for k, v in self._list if k.lower() != key_lower] + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Headers): + return NotImplemented + return self._dict == other._dict + + def clear(self) -> None: + """ + Remove all headers. + + """ + self._dict = {} + self._list = [] + + def update(self, *args: HeadersLike, **kwargs: str) -> None: + """ + Update from a :class:`Headers` instance and/or keyword arguments. + + """ + args = tuple( + arg.raw_items() if isinstance(arg, Headers) else arg for arg in args + ) + super().update(*args, **kwargs) + + # Methods for handling multiple values + + def get_all(self, key: str) -> List[str]: + """ + Return the (possibly empty) list of all values for a header. + + Args: + key: header name. + + """ + return self._dict.get(key.lower(), []) + + def raw_items(self) -> Iterator[Tuple[str, str]]: + """ + Return an iterator of all values as ``(name, value)`` pairs. + + """ + return iter(self._list) + + +# copy of _typeshed.SupportsKeysAndGetItem. +class SupportsKeysAndGetItem(Protocol): # pragma: no cover + """ + Dict-like types with ``keys() -> str`` and ``__getitem__(key: str) -> str`` methods. + + """ + + def keys(self) -> Iterable[str]: + ... + + def __getitem__(self, key: str) -> str: + ... + + +HeadersLike = Union[ + Headers, + Mapping[str, str], + Iterable[Tuple[str, str]], + SupportsKeysAndGetItem, +] +""" +Types accepted where :class:`Headers` is expected. + +In addition to :class:`Headers` itself, this includes dict-like types where both +keys and values are :class:`str`. 
+ +""" diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/exceptions.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/exceptions.py new file mode 100644 index 00000000..0c4fc518 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/exceptions.py @@ -0,0 +1,398 @@ +""" +:mod:`websockets.exceptions` defines the following exception hierarchy: + +* :exc:`WebSocketException` + * :exc:`ConnectionClosed` + * :exc:`ConnectionClosedError` + * :exc:`ConnectionClosedOK` + * :exc:`InvalidHandshake` + * :exc:`SecurityError` + * :exc:`InvalidMessage` + * :exc:`InvalidHeader` + * :exc:`InvalidHeaderFormat` + * :exc:`InvalidHeaderValue` + * :exc:`InvalidOrigin` + * :exc:`InvalidUpgrade` + * :exc:`InvalidStatus` + * :exc:`InvalidStatusCode` (legacy) + * :exc:`NegotiationError` + * :exc:`DuplicateParameter` + * :exc:`InvalidParameterName` + * :exc:`InvalidParameterValue` + * :exc:`AbortHandshake` + * :exc:`RedirectHandshake` + * :exc:`InvalidState` + * :exc:`InvalidURI` + * :exc:`PayloadTooBig` + * :exc:`ProtocolError` + +""" + +from __future__ import annotations + +import http +from typing import Optional + +from . import datastructures, frames, http11 + + +__all__ = [ + "WebSocketException", + "ConnectionClosed", + "ConnectionClosedError", + "ConnectionClosedOK", + "InvalidHandshake", + "SecurityError", + "InvalidMessage", + "InvalidHeader", + "InvalidHeaderFormat", + "InvalidHeaderValue", + "InvalidOrigin", + "InvalidUpgrade", + "InvalidStatus", + "InvalidStatusCode", + "NegotiationError", + "DuplicateParameter", + "InvalidParameterName", + "InvalidParameterValue", + "AbortHandshake", + "RedirectHandshake", + "InvalidState", + "InvalidURI", + "PayloadTooBig", + "ProtocolError", + "WebSocketProtocolError", +] + + +class WebSocketException(Exception): + """ + Base class for all exceptions defined by websockets. + + """ + + +class ConnectionClosed(WebSocketException): + """ + Raised when trying to interact with a closed connection. + + Attributes: + rcvd (Optional[Close]): if a close frame was received, its code and + reason are available in ``rcvd.code`` and ``rcvd.reason``. + sent (Optional[Close]): if a close frame was sent, its code and reason + are available in ``sent.code`` and ``sent.reason``. + rcvd_then_sent (Optional[bool]): if close frames were received and + sent, this attribute tells in which order this happened, from the + perspective of this side of the connection. 
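In application code, the two subclasses defined below are usually what gets caught; a hedged sketch, assuming a connection object ``ws`` with a coroutine ``recv()`` (that API is not part of this diff)::

    async def consume(ws) -> None:
        try:
            while True:
                print(await ws.recv())
        except ConnectionClosedOK:
            pass  # close code 1000/1001 exchanged: clean shutdown
        except ConnectionClosedError as exc:
            print("abnormal close:", exc.code, repr(exc.reason))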
+ + """ + + def __init__( + self, + rcvd: Optional[frames.Close], + sent: Optional[frames.Close], + rcvd_then_sent: Optional[bool] = None, + ) -> None: + self.rcvd = rcvd + self.sent = sent + self.rcvd_then_sent = rcvd_then_sent + + def __str__(self) -> str: + if self.rcvd is None: + if self.sent is None: + assert self.rcvd_then_sent is None + return "no close frame received or sent" + else: + assert self.rcvd_then_sent is None + return f"sent {self.sent}; no close frame received" + else: + if self.sent is None: + assert self.rcvd_then_sent is None + return f"received {self.rcvd}; no close frame sent" + else: + assert self.rcvd_then_sent is not None + if self.rcvd_then_sent: + return f"received {self.rcvd}; then sent {self.sent}" + else: + return f"sent {self.sent}; then received {self.rcvd}" + + # code and reason attributes are provided for backwards-compatibility + + @property + def code(self) -> int: + return 1006 if self.rcvd is None else self.rcvd.code + + @property + def reason(self) -> str: + return "" if self.rcvd is None else self.rcvd.reason + + +class ConnectionClosedError(ConnectionClosed): + """ + Like :exc:`ConnectionClosed`, when the connection terminated with an error. + + A close code other than 1000 (OK) or 1001 (going away) was received or + sent, or the closing handshake didn't complete properly. + + """ + + +class ConnectionClosedOK(ConnectionClosed): + """ + Like :exc:`ConnectionClosed`, when the connection terminated properly. + + A close code 1000 (OK) or 1001 (going away) was received and sent. + + """ + + +class InvalidHandshake(WebSocketException): + """ + Raised during the handshake when the WebSocket connection fails. + + """ + + +class SecurityError(InvalidHandshake): + """ + Raised when a handshake request or response breaks a security rule. + + Security limits are hard coded. + + """ + + +class InvalidMessage(InvalidHandshake): + """ + Raised when a handshake request or response is malformed. + + """ + + +class InvalidHeader(InvalidHandshake): + """ + Raised when a HTTP header doesn't have a valid format or value. + + """ + + def __init__(self, name: str, value: Optional[str] = None) -> None: + self.name = name + self.value = value + + def __str__(self) -> str: + if self.value is None: + return f"missing {self.name} header" + elif self.value == "": + return f"empty {self.name} header" + else: + return f"invalid {self.name} header: {self.value}" + + +class InvalidHeaderFormat(InvalidHeader): + """ + Raised when a HTTP header cannot be parsed. + + The format of the header doesn't match the grammar for that header. + + """ + + def __init__(self, name: str, error: str, header: str, pos: int) -> None: + super().__init__(name, f"{error} at {pos} in {header}") + + +class InvalidHeaderValue(InvalidHeader): + """ + Raised when a HTTP header has a wrong value. + + The format of the header is correct but a value isn't acceptable. + + """ + + +class InvalidOrigin(InvalidHeader): + """ + Raised when the Origin header in a request isn't allowed. + + """ + + def __init__(self, origin: Optional[str]) -> None: + super().__init__("Origin", origin) + + +class InvalidUpgrade(InvalidHeader): + """ + Raised when the Upgrade or Connection header isn't correct. + + """ + + +class InvalidStatus(InvalidHandshake): + """ + Raised when a handshake response rejects the WebSocket upgrade. 
+ + """ + + def __init__(self, response: http11.Response) -> None: + self.response = response + + def __str__(self) -> str: + return ( + "server rejected WebSocket connection: " + f"HTTP {self.response.status_code:d}" + ) + + +class InvalidStatusCode(InvalidHandshake): + """ + Raised when a handshake response status code is invalid. + + """ + + def __init__(self, status_code: int, headers: datastructures.Headers) -> None: + self.status_code = status_code + self.headers = headers + + def __str__(self) -> str: + return f"server rejected WebSocket connection: HTTP {self.status_code}" + + +class NegotiationError(InvalidHandshake): + """ + Raised when negotiating an extension fails. + + """ + + +class DuplicateParameter(NegotiationError): + """ + Raised when a parameter name is repeated in an extension header. + + """ + + def __init__(self, name: str) -> None: + self.name = name + + def __str__(self) -> str: + return f"duplicate parameter: {self.name}" + + +class InvalidParameterName(NegotiationError): + """ + Raised when a parameter name in an extension header is invalid. + + """ + + def __init__(self, name: str) -> None: + self.name = name + + def __str__(self) -> str: + return f"invalid parameter name: {self.name}" + + +class InvalidParameterValue(NegotiationError): + """ + Raised when a parameter value in an extension header is invalid. + + """ + + def __init__(self, name: str, value: Optional[str]) -> None: + self.name = name + self.value = value + + def __str__(self) -> str: + if self.value is None: + return f"missing value for parameter {self.name}" + elif self.value == "": + return f"empty value for parameter {self.name}" + else: + return f"invalid value for parameter {self.name}: {self.value}" + + +class AbortHandshake(InvalidHandshake): + """ + Raised to abort the handshake on purpose and return a HTTP response. + + This exception is an implementation detail. + + The public API + is :meth:`~websockets.server.WebSocketServerProtocol.process_request`. + + Attributes: + status (~http.HTTPStatus): HTTP status code. + headers (Headers): HTTP response headers. + body (bytes): HTTP response body. + """ + + def __init__( + self, + status: http.HTTPStatus, + headers: datastructures.HeadersLike, + body: bytes = b"", + ) -> None: + self.status = status + self.headers = datastructures.Headers(headers) + self.body = body + + def __str__(self) -> str: + return ( + f"HTTP {self.status:d}, " + f"{len(self.headers)} headers, " + f"{len(self.body)} bytes" + ) + + +class RedirectHandshake(InvalidHandshake): + """ + Raised when a handshake gets redirected. + + This exception is an implementation detail. + + """ + + def __init__(self, uri: str) -> None: + self.uri = uri + + def __str__(self) -> str: + return f"redirect to {self.uri}" + + +class InvalidState(WebSocketException, AssertionError): + """ + Raised when an operation is forbidden in the current state. + + This exception is an implementation detail. + + It should never be raised in normal circumstances. + + """ + + +class InvalidURI(WebSocketException): + """ + Raised when connecting to an URI that isn't a valid WebSocket URI. + + """ + + def __init__(self, uri: str, msg: str) -> None: + self.uri = uri + self.msg = msg + + def __str__(self) -> str: + return f"{self.uri} isn't a valid URI: {self.msg}" + + +class PayloadTooBig(WebSocketException): + """ + Raised when receiving a frame with a payload exceeding the maximum size. + + """ + + +class ProtocolError(WebSocketException): + """ + Raised when a frame breaks the protocol. 
+ + """ + + +WebSocketProtocolError = ProtocolError # for backwards compatibility diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/extensions/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/extensions/__init__.py new file mode 100644 index 00000000..02838b98 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/extensions/__init__.py @@ -0,0 +1,4 @@ +from .base import * + + +__all__ = ["Extension", "ClientExtensionFactory", "ServerExtensionFactory"] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/extensions/base.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/extensions/base.py new file mode 100644 index 00000000..06096761 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/extensions/base.py @@ -0,0 +1,128 @@ +from __future__ import annotations + +from typing import List, Optional, Sequence, Tuple + +from .. import frames +from ..typing import ExtensionName, ExtensionParameter + + +__all__ = ["Extension", "ClientExtensionFactory", "ServerExtensionFactory"] + + +class Extension: + """ + Base class for extensions. + + """ + + name: ExtensionName + """Extension identifier.""" + + def decode( + self, + frame: frames.Frame, + *, + max_size: Optional[int] = None, + ) -> frames.Frame: + """ + Decode an incoming frame. + + Args: + frame (Frame): incoming frame. + max_size: maximum payload size in bytes. + + Returns: + Frame: Decoded frame. + + Raises: + PayloadTooBig: if decoding the payload exceeds ``max_size``. + + """ + + def encode(self, frame: frames.Frame) -> frames.Frame: + """ + Encode an outgoing frame. + + Args: + frame (Frame): outgoing frame. + + Returns: + Frame: Encoded frame. + + """ + + +class ClientExtensionFactory: + """ + Base class for client-side extension factories. + + """ + + name: ExtensionName + """Extension identifier.""" + + def get_request_params(self) -> List[ExtensionParameter]: + """ + Build parameters to send to the server for this extension. + + Returns: + List[ExtensionParameter]: Parameters to send to the server. + + """ + + def process_response_params( + self, + params: Sequence[ExtensionParameter], + accepted_extensions: Sequence[Extension], + ) -> Extension: + """ + Process parameters received from the server. + + Args: + params (Sequence[ExtensionParameter]): parameters received from + the server for this extension. + accepted_extensions (Sequence[Extension]): list of previously + accepted extensions. + + Returns: + Extension: An extension instance. + + Raises: + NegotiationError: if parameters aren't acceptable. + + """ + + +class ServerExtensionFactory: + """ + Base class for server-side extension factories. + + """ + + name: ExtensionName + """Extension identifier.""" + + def process_request_params( + self, + params: Sequence[ExtensionParameter], + accepted_extensions: Sequence[Extension], + ) -> Tuple[List[ExtensionParameter], Extension]: + """ + Process parameters received from the client. + + Args: + params (Sequence[ExtensionParameter]): parameters received from + the client for this extension. + accepted_extensions (Sequence[Extension]): list of previously + accepted extensions. + + Returns: + Tuple[List[ExtensionParameter], Extension]: To accept the offer, + parameters to send to the client for this extension and an + extension instance. + + Raises: + NegotiationError: to reject the offer, if parameters received from + the client aren't acceptable. 
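To make the contract of these base classes concrete, here is a hypothetical pass-through extension; ``NoopExtension`` and its name are invented for illustration and are not part of the library::

    class NoopExtension(Extension):
        name = ExtensionName("x-noop")

        def decode(self, frame, *, max_size=None):
            return frame  # leave incoming frames untouched

        def encode(self, frame):
            return frame  # leave outgoing frames untouched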
+ + """ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/extensions/permessage_deflate.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/extensions/permessage_deflate.py new file mode 100644 index 00000000..e0de5e8f --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/extensions/permessage_deflate.py @@ -0,0 +1,661 @@ +from __future__ import annotations + +import dataclasses +import zlib +from typing import Any, Dict, List, Optional, Sequence, Tuple, Union + +from .. import exceptions, frames +from ..typing import ExtensionName, ExtensionParameter +from .base import ClientExtensionFactory, Extension, ServerExtensionFactory + + +__all__ = [ + "PerMessageDeflate", + "ClientPerMessageDeflateFactory", + "enable_client_permessage_deflate", + "ServerPerMessageDeflateFactory", + "enable_server_permessage_deflate", +] + +_EMPTY_UNCOMPRESSED_BLOCK = b"\x00\x00\xff\xff" + +_MAX_WINDOW_BITS_VALUES = [str(bits) for bits in range(8, 16)] + + +class PerMessageDeflate(Extension): + """ + Per-Message Deflate extension. + + """ + + name = ExtensionName("permessage-deflate") + + def __init__( + self, + remote_no_context_takeover: bool, + local_no_context_takeover: bool, + remote_max_window_bits: int, + local_max_window_bits: int, + compress_settings: Optional[Dict[Any, Any]] = None, + ) -> None: + """ + Configure the Per-Message Deflate extension. + + """ + if compress_settings is None: + compress_settings = {} + + assert remote_no_context_takeover in [False, True] + assert local_no_context_takeover in [False, True] + assert 8 <= remote_max_window_bits <= 15 + assert 8 <= local_max_window_bits <= 15 + assert "wbits" not in compress_settings + + self.remote_no_context_takeover = remote_no_context_takeover + self.local_no_context_takeover = local_no_context_takeover + self.remote_max_window_bits = remote_max_window_bits + self.local_max_window_bits = local_max_window_bits + self.compress_settings = compress_settings + + if not self.remote_no_context_takeover: + self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits) + + if not self.local_no_context_takeover: + self.encoder = zlib.compressobj( + wbits=-self.local_max_window_bits, **self.compress_settings + ) + + # To handle continuation frames properly, we must keep track of + # whether that initial frame was encoded. + self.decode_cont_data = False + # There's no need for self.encode_cont_data because we always encode + # outgoing frames, so it would always be True. + + def __repr__(self) -> str: + return ( + f"PerMessageDeflate(" + f"remote_no_context_takeover={self.remote_no_context_takeover}, " + f"local_no_context_takeover={self.local_no_context_takeover}, " + f"remote_max_window_bits={self.remote_max_window_bits}, " + f"local_max_window_bits={self.local_max_window_bits})" + ) + + def decode( + self, + frame: frames.Frame, + *, + max_size: Optional[int] = None, + ) -> frames.Frame: + """ + Decode an incoming frame. + + """ + # Skip control frames. 
+ if frame.opcode in frames.CTRL_OPCODES: + return frame + + # Handle continuation data frames: + # - skip if the message isn't encoded + # - reset "decode continuation data" flag if it's a final frame + if frame.opcode is frames.OP_CONT: + if not self.decode_cont_data: + return frame + if frame.fin: + self.decode_cont_data = False + + # Handle text and binary data frames: + # - skip if the message isn't encoded + # - unset the rsv1 flag on the first frame of a compressed message + # - set "decode continuation data" flag if it's a non-final frame + else: + if not frame.rsv1: + return frame + frame = dataclasses.replace(frame, rsv1=False) + if not frame.fin: + self.decode_cont_data = True + + # Re-initialize per-message decoder. + if self.remote_no_context_takeover: + self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits) + + # Uncompress data. Protect against zip bombs by preventing zlib from + # decompressing more than max_length bytes (except when the limit is + # disabled with max_size = None). + data = frame.data + if frame.fin: + data += _EMPTY_UNCOMPRESSED_BLOCK + max_length = 0 if max_size is None else max_size + try: + data = self.decoder.decompress(data, max_length) + except zlib.error as exc: + raise exceptions.ProtocolError("decompression failed") from exc + if self.decoder.unconsumed_tail: + raise exceptions.PayloadTooBig(f"over size limit (? > {max_size} bytes)") + + # Allow garbage collection of the decoder if it won't be reused. + if frame.fin and self.remote_no_context_takeover: + del self.decoder + + return dataclasses.replace(frame, data=data) + + def encode(self, frame: frames.Frame) -> frames.Frame: + """ + Encode an outgoing frame. + + """ + # Skip control frames. + if frame.opcode in frames.CTRL_OPCODES: + return frame + + # Since we always encode messages, there's no "encode continuation + # data" flag similar to "decode continuation data" at this time. + + if frame.opcode is not frames.OP_CONT: + # Set the rsv1 flag on the first frame of a compressed message. + frame = dataclasses.replace(frame, rsv1=True) + # Re-initialize per-message decoder. + if self.local_no_context_takeover: + self.encoder = zlib.compressobj( + wbits=-self.local_max_window_bits, **self.compress_settings + ) + + # Compress data. + data = self.encoder.compress(frame.data) + self.encoder.flush(zlib.Z_SYNC_FLUSH) + if frame.fin and data.endswith(_EMPTY_UNCOMPRESSED_BLOCK): + data = data[:-4] + + # Allow garbage collection of the encoder if it won't be reused. + if frame.fin and self.local_no_context_takeover: + del self.encoder + + return dataclasses.replace(frame, data=data) + + +def _build_parameters( + server_no_context_takeover: bool, + client_no_context_takeover: bool, + server_max_window_bits: Optional[int], + client_max_window_bits: Optional[Union[int, bool]], +) -> List[ExtensionParameter]: + """ + Build a list of ``(name, value)`` pairs for some compression parameters. 
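The encode/decode pair above relies on the raw-deflate trick from RFC 7692: the encoder strips the trailing empty uncompressed block that ``Z_SYNC_FLUSH`` emits, and the decoder re-appends it. A self-contained round trip with plain :mod:`zlib` showing the same framing::

    import zlib

    enc = zlib.compressobj(wbits=-15)  # raw deflate, 15-bit window
    payload = enc.compress(b"hello") + enc.flush(zlib.Z_SYNC_FLUSH)
    assert payload.endswith(b"\x00\x00\xff\xff")
    wire = payload[:-4]  # what permessage-deflate puts in the frame
    dec = zlib.decompressobj(wbits=-15)
    assert dec.decompress(wire + b"\x00\x00\xff\xff") == b"hello"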
+ + """ + params: List[ExtensionParameter] = [] + if server_no_context_takeover: + params.append(("server_no_context_takeover", None)) + if client_no_context_takeover: + params.append(("client_no_context_takeover", None)) + if server_max_window_bits: + params.append(("server_max_window_bits", str(server_max_window_bits))) + if client_max_window_bits is True: # only in handshake requests + params.append(("client_max_window_bits", None)) + elif client_max_window_bits: + params.append(("client_max_window_bits", str(client_max_window_bits))) + return params + + +def _extract_parameters( + params: Sequence[ExtensionParameter], *, is_server: bool +) -> Tuple[bool, bool, Optional[int], Optional[Union[int, bool]]]: + """ + Extract compression parameters from a list of ``(name, value)`` pairs. + + If ``is_server`` is :obj:`True`, ``client_max_window_bits`` may be + provided without a value. This is only allowed in handshake requests. + + """ + server_no_context_takeover: bool = False + client_no_context_takeover: bool = False + server_max_window_bits: Optional[int] = None + client_max_window_bits: Optional[Union[int, bool]] = None + + for name, value in params: + + if name == "server_no_context_takeover": + if server_no_context_takeover: + raise exceptions.DuplicateParameter(name) + if value is None: + server_no_context_takeover = True + else: + raise exceptions.InvalidParameterValue(name, value) + + elif name == "client_no_context_takeover": + if client_no_context_takeover: + raise exceptions.DuplicateParameter(name) + if value is None: + client_no_context_takeover = True + else: + raise exceptions.InvalidParameterValue(name, value) + + elif name == "server_max_window_bits": + if server_max_window_bits is not None: + raise exceptions.DuplicateParameter(name) + if value in _MAX_WINDOW_BITS_VALUES: + server_max_window_bits = int(value) + else: + raise exceptions.InvalidParameterValue(name, value) + + elif name == "client_max_window_bits": + if client_max_window_bits is not None: + raise exceptions.DuplicateParameter(name) + if is_server and value is None: # only in handshake requests + client_max_window_bits = True + elif value in _MAX_WINDOW_BITS_VALUES: + client_max_window_bits = int(value) + else: + raise exceptions.InvalidParameterValue(name, value) + + else: + raise exceptions.InvalidParameterName(name) + + return ( + server_no_context_takeover, + client_no_context_takeover, + server_max_window_bits, + client_max_window_bits, + ) + + +class ClientPerMessageDeflateFactory(ClientExtensionFactory): + """ + Client-side extension factory for the Per-Message Deflate extension. + + Parameters behave as described in `section 7.1 of RFC 7692`_. + + .. _section 7.1 of RFC 7692: https://www.rfc-editor.org/rfc/rfc7692.html#section-7.1 + + Set them to :obj:`True` to include them in the negotiation offer without a + value or to an integer value to include them with this value. + + Args: + server_no_context_takeover: prevent server from using context takeover. + client_no_context_takeover: prevent client from using context takeover. + server_max_window_bits: maximum size of the server's LZ77 sliding window + in bits, between 8 and 15. + client_max_window_bits: maximum size of the client's LZ77 sliding window + in bits, between 8 and 15, or :obj:`True` to indicate support without + setting a limit. + compress_settings: additional keyword arguments for :func:`zlib.compressobj`, + excluding ``wbits``. 
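For instance, the default settings below advertise only ``client_max_window_bits`` without a value; feeding them through ``_build_parameters`` shows the resulting offer (editorial illustration)::

    params = _build_parameters(False, False, None, True)
    assert params == [("client_max_window_bits", None)]
    # i.e. "Sec-WebSocket-Extensions: permessage-deflate; client_max_window_bits"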
+
+    """
+
+    name = ExtensionName("permessage-deflate")
+
+    def __init__(
+        self,
+        server_no_context_takeover: bool = False,
+        client_no_context_takeover: bool = False,
+        server_max_window_bits: Optional[int] = None,
+        client_max_window_bits: Optional[Union[int, bool]] = True,
+        compress_settings: Optional[Dict[str, Any]] = None,
+    ) -> None:
+        """
+        Configure the Per-Message Deflate extension factory.
+
+        """
+        if not (server_max_window_bits is None or 8 <= server_max_window_bits <= 15):
+            raise ValueError("server_max_window_bits must be between 8 and 15")
+        if not (
+            client_max_window_bits is None
+            or client_max_window_bits is True
+            or 8 <= client_max_window_bits <= 15
+        ):
+            raise ValueError("client_max_window_bits must be between 8 and 15")
+        if compress_settings is not None and "wbits" in compress_settings:
+            raise ValueError(
+                "compress_settings must not include wbits, "
+                "set client_max_window_bits instead"
+            )
+
+        self.server_no_context_takeover = server_no_context_takeover
+        self.client_no_context_takeover = client_no_context_takeover
+        self.server_max_window_bits = server_max_window_bits
+        self.client_max_window_bits = client_max_window_bits
+        self.compress_settings = compress_settings
+
+    def get_request_params(self) -> List[ExtensionParameter]:
+        """
+        Build request parameters.
+
+        """
+        return _build_parameters(
+            self.server_no_context_takeover,
+            self.client_no_context_takeover,
+            self.server_max_window_bits,
+            self.client_max_window_bits,
+        )
+
+    def process_response_params(
+        self,
+        params: Sequence[ExtensionParameter],
+        accepted_extensions: Sequence[Extension],
+    ) -> PerMessageDeflate:
+        """
+        Process response parameters.
+
+        Return an extension instance.
+
+        """
+        if any(other.name == self.name for other in accepted_extensions):
+            raise exceptions.NegotiationError(f"received duplicate {self.name}")
+
+        # Request parameters are available in instance variables.
+
+        # Load response parameters in local variables.
+        (
+            server_no_context_takeover,
+            client_no_context_takeover,
+            server_max_window_bits,
+            client_max_window_bits,
+        ) = _extract_parameters(params, is_server=False)
+
+        # After comparing the request and the response, the final
+        # configuration must be available in the local variables.
+
+        # server_no_context_takeover
+        #
+        #   Req.    Resp.   Result
+        #   ------  ------  --------------------------------------------------
+        #   False   False   False
+        #   False   True    True
+        #   True    False   Error!
+        #   True    True    True
+
+        if self.server_no_context_takeover:
+            if not server_no_context_takeover:
+                raise exceptions.NegotiationError("expected server_no_context_takeover")
+
+        # client_no_context_takeover
+        #
+        #   Req.    Resp.   Result
+        #   ------  ------  --------------------------------------------------
+        #   False   False   False
+        #   False   True    True
+        #   True    False   True - must change value
+        #   True    True    True
+
+        if self.client_no_context_takeover:
+            if not client_no_context_takeover:
+                client_no_context_takeover = True
+
+        # server_max_window_bits
+
+        #   Req.    Resp.   Result
+        #   ------  ------  --------------------------------------------------
+        #   None    None    None
+        #   None    8≤M≤15  M
+        #   8≤N≤15  None    Error!
+        #   8≤N≤15  8≤M≤N   M
+        #   8≤N≤15  N<M≤15  Error!
+
+        if self.server_max_window_bits is None:
+            pass
+
+        else:
+            if server_max_window_bits is None:
+                raise exceptions.NegotiationError("expected server_max_window_bits")
+            elif server_max_window_bits > self.server_max_window_bits:
+                raise exceptions.NegotiationError("unsupported server_max_window_bits")
+
+        # client_max_window_bits
+
+        #   Req.    Resp.   Result
+        #   ------  ------  --------------------------------------------------
+        #   None    None    None
+        #   None    8≤M≤15  Error!
+        #   True    None    None
+        #   True    8≤M≤15  M
+        #   8≤N≤15  None    N - must change value
+        #   8≤N≤15  8≤M≤N   M
+        #   8≤N≤15  N<M≤15  Error!
+
+        if self.client_max_window_bits is None:
+            if client_max_window_bits is not None:
+                raise exceptions.NegotiationError("unexpected client_max_window_bits")
+
+        elif self.client_max_window_bits is True:
+            pass
+
+        else:
+            if client_max_window_bits is None:
+                client_max_window_bits = self.client_max_window_bits
+            elif client_max_window_bits > self.client_max_window_bits:
+                raise exceptions.NegotiationError("unsupported client_max_window_bits")
+
+        return PerMessageDeflate(
+            server_no_context_takeover,  # remote_no_context_takeover
+            client_no_context_takeover,  # local_no_context_takeover
+            server_max_window_bits or 15,  # remote_max_window_bits
+            client_max_window_bits or 15,  # local_max_window_bits
+            self.compress_settings,
+        )
+
+
+def enable_client_permessage_deflate(
+    extensions: Optional[Sequence[ClientExtensionFactory]],
+) -> Sequence[ClientExtensionFactory]:
+    """
+    Enable Per-Message Deflate with default settings in client extensions.
+
+    If the extension is already present, perhaps with non-default settings,
+    the configuration isn't changed.
+
+    """
+    if extensions is None:
+        extensions = []
+    if not any(
+        extension_factory.name == ClientPerMessageDeflateFactory.name
+        for extension_factory in extensions
+    ):
+        extensions = list(extensions) + [
+            ClientPerMessageDeflateFactory(
+                compress_settings={"memLevel": 5},
+            )
+        ]
+    return extensions
+
+
+class ServerPerMessageDeflateFactory(ServerExtensionFactory):
+    """
+    Server-side extension factory for the Per-Message Deflate extension.
+
+    Parameters behave as described in `section 7.1 of RFC 7692`_.
+
+    .. _section 7.1 of RFC 7692: https://www.rfc-editor.org/rfc/rfc7692.html#section-7.1
+
+    Set them to :obj:`True` to include them in the negotiation offer without a
+    value or to an integer value to include them with this value.
+
+    Args:
+        server_no_context_takeover: prevent server from using context takeover.
+        client_no_context_takeover: prevent client from using context takeover.
+        server_max_window_bits: maximum size of the server's LZ77 sliding window
+            in bits, between 8 and 15.
+        client_max_window_bits: maximum size of the client's LZ77 sliding window
+            in bits, between 8 and 15.
+        compress_settings: additional keyword arguments for :func:`zlib.compressobj`,
+            excluding ``wbits``.
+        require_client_max_window_bits: do not enable compression at all if
+            client doesn't advertise support for ``client_max_window_bits``;
+            the default behavior is to enable compression without enforcing
+            ``client_max_window_bits``.
+
+    """
+
+    name = ExtensionName("permessage-deflate")
+
+    def __init__(
+        self,
+        server_no_context_takeover: bool = False,
+        client_no_context_takeover: bool = False,
+        server_max_window_bits: Optional[int] = None,
+        client_max_window_bits: Optional[int] = None,
+        compress_settings: Optional[Dict[str, Any]] = None,
+        require_client_max_window_bits: bool = False,
+    ) -> None:
+        """
+        Configure the Per-Message Deflate extension factory.
+
+        """
+        if not (server_max_window_bits is None or 8 <= server_max_window_bits <= 15):
+            raise ValueError("server_max_window_bits must be between 8 and 15")
+        if not (client_max_window_bits is None or 8 <= client_max_window_bits <= 15):
+            raise ValueError("client_max_window_bits must be between 8 and 15")
+        if compress_settings is not None and "wbits" in compress_settings:
+            raise ValueError(
+                "compress_settings must not include wbits, "
+                "set server_max_window_bits instead"
+            )
+        if client_max_window_bits is None and require_client_max_window_bits:
+            raise ValueError(
+                "require_client_max_window_bits is enabled, "
+                "but client_max_window_bits isn't configured"
+            )
+
+        self.server_no_context_takeover = server_no_context_takeover
+        self.client_no_context_takeover = client_no_context_takeover
+        self.server_max_window_bits = server_max_window_bits
+        self.client_max_window_bits = client_max_window_bits
+        self.compress_settings = compress_settings
+        self.require_client_max_window_bits = require_client_max_window_bits
+
+    def process_request_params(
+        self,
+        params: Sequence[ExtensionParameter],
+        accepted_extensions: Sequence[Extension],
+    ) -> Tuple[List[ExtensionParameter], PerMessageDeflate]:
+        """
+        Process request parameters.
+
+        Return response params and an extension instance.
+
+        """
+        if any(other.name == self.name for other in accepted_extensions):
+            raise exceptions.NegotiationError(f"skipped duplicate {self.name}")
+
+        # Load request parameters in local variables.
+        (
+            server_no_context_takeover,
+            client_no_context_takeover,
+            server_max_window_bits,
+            client_max_window_bits,
+        ) = _extract_parameters(params, is_server=True)
+
+        # Configuration parameters are available in instance variables.
+
+        # After comparing the request and the configuration, the response must
+        # be available in the local variables.
+
+        # server_no_context_takeover
+        #
+        #   Config  Req.    Resp.
+        #   ------  ------  --------------------------------------------------
+        #   False   False   False
+        #   False   True    True
+        #   True    False   True - must change value to True
+        #   True    True    True
+
+        if self.server_no_context_takeover:
+            if not server_no_context_takeover:
+                server_no_context_takeover = True
+
+        # client_no_context_takeover
+        #
+        #   Config  Req.    Resp.
+        #   ------  ------  --------------------------------------------------
+        #   False   False   False
+        #   False   True    True (or False)
+        #   True    False   True - must change value to True
+        #   True    True    True (or False)
+
+        if self.client_no_context_takeover:
+            if not client_no_context_takeover:
+                client_no_context_takeover = True
+
+        # server_max_window_bits
+
+        #   Config  Req.    Resp.
+        #   ------  ------  --------------------------------------------------
+        #   None    None    None
+        #   None    8≤M≤15  M
+        #   8≤N≤15  None    N - must change value
+        #   8≤N≤15  8≤M≤N   M
+        #   8≤N≤15  N<M≤15  N - must change value
+
+        if self.server_max_window_bits is None:
+            pass
+
+        else:
+            if server_max_window_bits is None:
+                server_max_window_bits = self.server_max_window_bits
+            elif server_max_window_bits > self.server_max_window_bits:
+                server_max_window_bits = self.server_max_window_bits
+
+        # client_max_window_bits
+
+        #   Config  Req.    Resp.
+        #   ------  ------  --------------------------------------------------
+        #   None    None    None
+        #   None    True    None - must change value
+        #   None    8≤M≤15  M (or None)
+        #   8≤N≤15  None    None or Error!
+        #   8≤N≤15  True    N - must change value
+        #   8≤N≤15  8≤M≤N   M (or None)
+        #   8≤N≤15  N<M≤15  N - must change value
+
+        if self.client_max_window_bits is None:
+            if client_max_window_bits is True:
+                client_max_window_bits = self.client_max_window_bits
+
+        else:
+            if client_max_window_bits is None:
+                if self.require_client_max_window_bits:
+                    raise exceptions.NegotiationError("required client_max_window_bits")
+            elif client_max_window_bits is True:
+                client_max_window_bits = self.client_max_window_bits
+            elif self.client_max_window_bits < client_max_window_bits:
+                client_max_window_bits = self.client_max_window_bits
+
+        return (
+            _build_parameters(
+                server_no_context_takeover,
+                client_no_context_takeover,
+                server_max_window_bits,
+                client_max_window_bits,
+            ),
+            PerMessageDeflate(
+                client_no_context_takeover,  # remote_no_context_takeover
+                server_no_context_takeover,  # local_no_context_takeover
+                client_max_window_bits or 15,  # remote_max_window_bits
+                server_max_window_bits or 15,  # local_max_window_bits
+                self.compress_settings,
+            ),
+        )
+
+
+def enable_server_permessage_deflate(
+    extensions: Optional[Sequence[ServerExtensionFactory]],
+) -> Sequence[ServerExtensionFactory]:
+    """
+    Enable Per-Message Deflate with default settings in server extensions.
+
+    If the extension is already present, perhaps with non-default settings,
+    the configuration isn't changed.
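The negotiation logic above can be exercised directly; a hedged sketch for a server configured with a 12-bit window answering a client that offered a bare ``client_max_window_bits`` (illustrative, not part of the file)::

    factory = ServerPerMessageDeflateFactory(server_max_window_bits=12)
    response_params, extension = factory.process_request_params(
        [("client_max_window_bits", None)], accepted_extensions=[]
    )
    assert response_params == [("server_max_window_bits", "12")]
    # The resulting extension compresses with a 12-bit local window.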
+ + """ + if extensions is None: + extensions = [] + if not any( + ext_factory.name == ServerPerMessageDeflateFactory.name + for ext_factory in extensions + ): + extensions = list(extensions) + [ + ServerPerMessageDeflateFactory( + server_max_window_bits=12, + client_max_window_bits=12, + compress_settings={"memLevel": 5}, + ) + ] + return extensions diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/frames.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/frames.py new file mode 100644 index 00000000..043b688b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/frames.py @@ -0,0 +1,443 @@ +from __future__ import annotations + +import dataclasses +import enum +import io +import secrets +import struct +from typing import Callable, Generator, Optional, Sequence, Tuple + +from . import exceptions, extensions +from .typing import Data + + +try: + from .speedups import apply_mask +except ImportError: # pragma: no cover + from .utils import apply_mask + + +__all__ = [ + "Opcode", + "OP_CONT", + "OP_TEXT", + "OP_BINARY", + "OP_CLOSE", + "OP_PING", + "OP_PONG", + "DATA_OPCODES", + "CTRL_OPCODES", + "Frame", + "prepare_data", + "prepare_ctrl", + "Close", +] + + +class Opcode(enum.IntEnum): + """Opcode values for WebSocket frames.""" + + CONT, TEXT, BINARY = 0x00, 0x01, 0x02 + CLOSE, PING, PONG = 0x08, 0x09, 0x0A + + +OP_CONT = Opcode.CONT +OP_TEXT = Opcode.TEXT +OP_BINARY = Opcode.BINARY +OP_CLOSE = Opcode.CLOSE +OP_PING = Opcode.PING +OP_PONG = Opcode.PONG + +DATA_OPCODES = OP_CONT, OP_TEXT, OP_BINARY +CTRL_OPCODES = OP_CLOSE, OP_PING, OP_PONG + + +# See https://www.iana.org/assignments/websocket/websocket.xhtml +CLOSE_CODES = { + 1000: "OK", + 1001: "going away", + 1002: "protocol error", + 1003: "unsupported type", + # 1004 is reserved + 1005: "no status code [internal]", + 1006: "connection closed abnormally [internal]", + 1007: "invalid data", + 1008: "policy violation", + 1009: "message too big", + 1010: "extension required", + 1011: "unexpected error", + 1012: "service restart", + 1013: "try again later", + 1014: "bad gateway", + 1015: "TLS failure [internal]", +} + + +# Close code that are allowed in a close frame. +# Using a set optimizes `code in EXTERNAL_CLOSE_CODES`. +EXTERNAL_CLOSE_CODES = { + 1000, + 1001, + 1002, + 1003, + 1007, + 1008, + 1009, + 1010, + 1011, + 1012, + 1013, + 1014, +} + +OK_CLOSE_CODES = {1000, 1001} + + +BytesLike = bytes, bytearray, memoryview + + +@dataclasses.dataclass +class Frame: + """ + WebSocket frame. + + Attributes: + opcode: Opcode. + data: Payload data. + fin: FIN bit. + rsv1: RSV1 bit. + rsv2: RSV2 bit. + rsv3: RSV3 bit. + + Only these fields are needed. The MASK bit, payload length and masking-key + are handled on the fly when parsing and serializing frames. + + """ + + opcode: Opcode + data: bytes + fin: bool = True + rsv1: bool = False + rsv2: bool = False + rsv3: bool = False + + def __str__(self) -> str: + """ + Return a human-readable represention of a frame. + + """ + coding = None + length = f"{len(self.data)} byte{'' if len(self.data) == 1 else 's'}" + non_final = "" if self.fin else "continued" + + if self.opcode is OP_TEXT: + # Decoding only the beginning and the end is needlessly hard. + # Decode the entire payload then elide later if necessary. + data = repr(self.data.decode()) + elif self.opcode is OP_BINARY: + # We'll show at most the first 16 bytes and the last 8 bytes. + # Encode just what we need, plus two dummy bytes to elide later. 
+ binary = self.data + if len(binary) > 25: + binary = b"".join([binary[:16], b"\x00\x00", binary[-8:]]) + data = " ".join(f"{byte:02x}" for byte in binary) + elif self.opcode is OP_CLOSE: + data = str(Close.parse(self.data)) + elif self.data: + # We don't know if a Continuation frame contains text or binary. + # Ping and Pong frames could contain UTF-8. + # Attempt to decode as UTF-8 and display it as text; fallback to + # binary. If self.data is a memoryview, it has no decode() method, + # which raises AttributeError. + try: + data = repr(self.data.decode()) + coding = "text" + except (UnicodeDecodeError, AttributeError): + binary = self.data + if len(binary) > 25: + binary = b"".join([binary[:16], b"\x00\x00", binary[-8:]]) + data = " ".join(f"{byte:02x}" for byte in binary) + coding = "binary" + else: + data = "''" + + if len(data) > 75: + data = data[:48] + "..." + data[-24:] + + metadata = ", ".join(filter(None, [coding, length, non_final])) + + return f"{self.opcode.name} {data} [{metadata}]" + + @classmethod + def parse( + cls, + read_exact: Callable[[int], Generator[None, None, bytes]], + *, + mask: bool, + max_size: Optional[int] = None, + extensions: Optional[Sequence[extensions.Extension]] = None, + ) -> Generator[None, None, Frame]: + """ + Parse a WebSocket frame. + + This is a generator-based coroutine. + + Args: + read_exact: generator-based coroutine that reads the requested + bytes or raises an exception if there isn't enough data. + mask: whether the frame should be masked i.e. whether the read + happens on the server side. + max_size: maximum payload size in bytes. + extensions: list of extensions, applied in reverse order. + + Raises: + PayloadTooBig: if the frame's payload size exceeds ``max_size``. + ProtocolError: if the frame contains incorrect values. + + """ + # Read the header. + data = yield from read_exact(2) + head1, head2 = struct.unpack("!BB", data) + + # While not Pythonic, this is marginally faster than calling bool(). + fin = True if head1 & 0b10000000 else False + rsv1 = True if head1 & 0b01000000 else False + rsv2 = True if head1 & 0b00100000 else False + rsv3 = True if head1 & 0b00010000 else False + + try: + opcode = Opcode(head1 & 0b00001111) + except ValueError as exc: + raise exceptions.ProtocolError("invalid opcode") from exc + + if (True if head2 & 0b10000000 else False) != mask: + raise exceptions.ProtocolError("incorrect masking") + + length = head2 & 0b01111111 + if length == 126: + data = yield from read_exact(2) + (length,) = struct.unpack("!H", data) + elif length == 127: + data = yield from read_exact(8) + (length,) = struct.unpack("!Q", data) + if max_size is not None and length > max_size: + raise exceptions.PayloadTooBig( + f"over size limit ({length} > {max_size} bytes)" + ) + if mask: + mask_bytes = yield from read_exact(4) + + # Read the data. + data = yield from read_exact(length) + if mask: + data = apply_mask(data, mask_bytes) + + frame = cls(opcode, data, fin, rsv1, rsv2, rsv3) + + if extensions is None: + extensions = [] + for extension in reversed(extensions): + frame = extension.decode(frame, max_size=max_size) + + frame.check() + + return frame + + def serialize( + self, + *, + mask: bool, + extensions: Optional[Sequence[extensions.Extension]] = None, + ) -> bytes: + """ + Serialize a WebSocket frame. + + Args: + mask: whether the frame should be masked i.e. whether the write + happens on the client side. + extensions: list of extensions, applied in order. + + Raises: + ProtocolError: if the frame contains incorrect values. 
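A concrete check of the wire layout this method produces for a small masked text frame (editorial illustration)::

    frame = Frame(OP_TEXT, "Hi".encode())
    wire = frame.serialize(mask=True)
    assert len(wire) == 2 + 4 + 2  # header, masking key, masked payload
    assert wire[0] == 0x81         # FIN bit set, opcode TEXT
    assert wire[1] & 0x80          # MASK bit set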
+ + """ + self.check() + + if extensions is None: + extensions = [] + for extension in extensions: + self = extension.encode(self) + + output = io.BytesIO() + + # Prepare the header. + head1 = ( + (0b10000000 if self.fin else 0) + | (0b01000000 if self.rsv1 else 0) + | (0b00100000 if self.rsv2 else 0) + | (0b00010000 if self.rsv3 else 0) + | self.opcode + ) + + head2 = 0b10000000 if mask else 0 + + length = len(self.data) + if length < 126: + output.write(struct.pack("!BB", head1, head2 | length)) + elif length < 65536: + output.write(struct.pack("!BBH", head1, head2 | 126, length)) + else: + output.write(struct.pack("!BBQ", head1, head2 | 127, length)) + + if mask: + mask_bytes = secrets.token_bytes(4) + output.write(mask_bytes) + + # Prepare the data. + if mask: + data = apply_mask(self.data, mask_bytes) + else: + data = self.data + output.write(data) + + return output.getvalue() + + def check(self) -> None: + """ + Check that reserved bits and opcode have acceptable values. + + Raises: + ProtocolError: if a reserved bit or the opcode is invalid. + + """ + if self.rsv1 or self.rsv2 or self.rsv3: + raise exceptions.ProtocolError("reserved bits must be 0") + + if self.opcode in CTRL_OPCODES: + if len(self.data) > 125: + raise exceptions.ProtocolError("control frame too long") + if not self.fin: + raise exceptions.ProtocolError("fragmented control frame") + + +def prepare_data(data: Data) -> Tuple[int, bytes]: + """ + Convert a string or byte-like object to an opcode and a bytes-like object. + + This function is designed for data frames. + + If ``data`` is a :class:`str`, return ``OP_TEXT`` and a :class:`bytes` + object encoding ``data`` in UTF-8. + + If ``data`` is a bytes-like object, return ``OP_BINARY`` and a bytes-like + object. + + Raises: + TypeError: if ``data`` doesn't have a supported type. + + """ + if isinstance(data, str): + return OP_TEXT, data.encode("utf-8") + elif isinstance(data, BytesLike): + return OP_BINARY, data + else: + raise TypeError("data must be str or bytes-like") + + +def prepare_ctrl(data: Data) -> bytes: + """ + Convert a string or byte-like object to bytes. + + This function is designed for ping and pong frames. + + If ``data`` is a :class:`str`, return a :class:`bytes` object encoding + ``data`` in UTF-8. + + If ``data`` is a bytes-like object, return a :class:`bytes` object. + + Raises: + TypeError: if ``data`` doesn't have a supported type. + + """ + if isinstance(data, str): + return data.encode("utf-8") + elif isinstance(data, BytesLike): + return bytes(data) + else: + raise TypeError("data must be str or bytes-like") + + +@dataclasses.dataclass +class Close: + """ + Code and reason for WebSocket close frames. + + Attributes: + code: Close code. + reason: Close reason. + + """ + + code: int + reason: str + + def __str__(self) -> str: + """ + Return a human-readable represention of a close code and reason. + + """ + if 3000 <= self.code < 4000: + explanation = "registered" + elif 4000 <= self.code < 5000: + explanation = "private use" + else: + explanation = CLOSE_CODES.get(self.code, "unknown") + result = f"{self.code} ({explanation})" + + if self.reason: + result = f"{result} {self.reason}" + + return result + + @classmethod + def parse(cls, data: bytes) -> Close: + """ + Parse the payload of a close frame. + + Args: + data: payload of the close frame. + + Raises: + ProtocolError: if data is ill-formed. + UnicodeDecodeError: if the reason isn't valid UTF-8. 
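A round trip through the parse/serialize pair below, including the synthesized 1005 code for an empty payload (editorial illustration)::

    payload = Close(1000, "bye").serialize()
    assert payload == b"\x03\xe8bye"            # 1000 == 0x03E8, UTF-8 reason
    assert Close.parse(payload) == Close(1000, "bye")
    assert Close.parse(b"") == Close(1005, "")  # empty payload -> 1005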
+ + """ + if len(data) >= 2: + (code,) = struct.unpack("!H", data[:2]) + reason = data[2:].decode("utf-8") + close = cls(code, reason) + close.check() + return close + elif len(data) == 0: + return cls(1005, "") + else: + raise exceptions.ProtocolError("close frame too short") + + def serialize(self) -> bytes: + """ + Serialize the payload of a close frame. + + """ + self.check() + return struct.pack("!H", self.code) + self.reason.encode("utf-8") + + def check(self) -> None: + """ + Check that the close code has a valid value for a close frame. + + Raises: + ProtocolError: if the close code is invalid. + + """ + if not (self.code in EXTERNAL_CLOSE_CODES or 3000 <= self.code < 5000): + raise exceptions.ProtocolError("invalid status code") diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/headers.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/headers.py new file mode 100644 index 00000000..9ae3035a --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/headers.py @@ -0,0 +1,587 @@ +from __future__ import annotations + +import base64 +import binascii +import ipaddress +import re +from typing import Callable, List, Optional, Sequence, Tuple, TypeVar, cast + +from . import exceptions +from .typing import ( + ConnectionOption, + ExtensionHeader, + ExtensionName, + ExtensionParameter, + Subprotocol, + UpgradeProtocol, +) + + +__all__ = [ + "build_host", + "parse_connection", + "parse_upgrade", + "parse_extension", + "build_extension", + "parse_subprotocol", + "build_subprotocol", + "validate_subprotocols", + "build_www_authenticate_basic", + "parse_authorization_basic", + "build_authorization_basic", +] + + +T = TypeVar("T") + + +def build_host(host: str, port: int, secure: bool) -> str: + """ + Build a ``Host`` header. + + """ + # https://www.rfc-editor.org/rfc/rfc3986.html#section-3.2.2 + # IPv6 addresses must be enclosed in brackets. + try: + address = ipaddress.ip_address(host) + except ValueError: + # host is a hostname + pass + else: + # host is an IP address + if address.version == 6: + host = f"[{host}]" + + if port != (443 if secure else 80): + host = f"{host}:{port}" + + return host + + +# To avoid a dependency on a parsing library, we implement manually the ABNF +# described in https://www.rfc-editor.org/rfc/rfc6455.html#section-9.1 and +# https://www.rfc-editor.org/rfc/rfc7230.html#appendix-B. + + +def peek_ahead(header: str, pos: int) -> Optional[str]: + """ + Return the next character from ``header`` at the given position. + + Return :obj:`None` at the end of ``header``. + + We never need to peek more than one character ahead. + + """ + return None if pos == len(header) else header[pos] + + +_OWS_re = re.compile(r"[\t ]*") + + +def parse_OWS(header: str, pos: int) -> int: + """ + Parse optional whitespace from ``header`` at the given position. + + Return the new position. + + The whitespace itself isn't returned because it isn't significant. + + """ + # There's always a match, possibly empty, whose content doesn't matter. + match = _OWS_re.match(header, pos) + assert match is not None + return match.end() + + +_token_re = re.compile(r"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+") + + +def parse_token(header: str, pos: int, header_name: str) -> Tuple[str, int]: + """ + Parse a token from ``header`` at the given position. + + Return the token value and the new position. + + Raises: + InvalidHeaderFormat: on invalid inputs. 
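Sample outputs for ``build_host`` above: default ports are omitted and IPv6 literals get brackets (editorial illustration)::

    assert build_host("example.com", 80, secure=False) == "example.com"
    assert build_host("example.com", 8443, secure=True) == "example.com:8443"
    assert build_host("::1", 443, secure=True) == "[::1]"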
+ + """ + match = _token_re.match(header, pos) + if match is None: + raise exceptions.InvalidHeaderFormat(header_name, "expected token", header, pos) + return match.group(), match.end() + + +_quoted_string_re = re.compile( + r'"(?:[\x09\x20-\x21\x23-\x5b\x5d-\x7e]|\\[\x09\x20-\x7e\x80-\xff])*"' +) + + +_unquote_re = re.compile(r"\\([\x09\x20-\x7e\x80-\xff])") + + +def parse_quoted_string(header: str, pos: int, header_name: str) -> Tuple[str, int]: + """ + Parse a quoted string from ``header`` at the given position. + + Return the unquoted value and the new position. + + Raises: + InvalidHeaderFormat: on invalid inputs. + + """ + match = _quoted_string_re.match(header, pos) + if match is None: + raise exceptions.InvalidHeaderFormat( + header_name, "expected quoted string", header, pos + ) + return _unquote_re.sub(r"\1", match.group()[1:-1]), match.end() + + +_quotable_re = re.compile(r"[\x09\x20-\x7e\x80-\xff]*") + + +_quote_re = re.compile(r"([\x22\x5c])") + + +def build_quoted_string(value: str) -> str: + """ + Format ``value`` as a quoted string. + + This is the reverse of :func:`parse_quoted_string`. + + """ + match = _quotable_re.fullmatch(value) + if match is None: + raise ValueError("invalid characters for quoted-string encoding") + return '"' + _quote_re.sub(r"\\\1", value) + '"' + + +def parse_list( + parse_item: Callable[[str, int, str], Tuple[T, int]], + header: str, + pos: int, + header_name: str, +) -> List[T]: + """ + Parse a comma-separated list from ``header`` at the given position. + + This is appropriate for parsing values with the following grammar: + + 1#item + + ``parse_item`` parses one item. + + ``header`` is assumed not to start or end with whitespace. + + (This function is designed for parsing an entire header value and + :func:`~websockets.http.read_headers` strips whitespace from values.) + + Return a list of items. + + Raises: + InvalidHeaderFormat: on invalid inputs. + + """ + # Per https://www.rfc-editor.org/rfc/rfc7230.html#section-7, "a recipient + # MUST parse and ignore a reasonable number of empty list elements"; + # hence while loops that remove extra delimiters. + + # Remove extra delimiters before the first item. + while peek_ahead(header, pos) == ",": + pos = parse_OWS(header, pos + 1) + + items = [] + while True: + # Loop invariant: a item starts at pos in header. + item, pos = parse_item(header, pos, header_name) + items.append(item) + pos = parse_OWS(header, pos) + + # We may have reached the end of the header. + if pos == len(header): + break + + # There must be a delimiter after each element except the last one. + if peek_ahead(header, pos) == ",": + pos = parse_OWS(header, pos + 1) + else: + raise exceptions.InvalidHeaderFormat( + header_name, "expected comma", header, pos + ) + + # Remove extra delimiters before the next item. + while peek_ahead(header, pos) == ",": + pos = parse_OWS(header, pos + 1) + + # We may have reached the end of the header. + if pos == len(header): + break + + # Since we only advance in the header by one character with peek_ahead() + # or with the end position of a regex match, we can't overshoot the end. + assert pos == len(header) + + return items + + +def parse_connection_option( + header: str, pos: int, header_name: str +) -> Tuple[ConnectionOption, int]: + """ + Parse a Connection option from ``header`` at the given position. + + Return the protocol value and the new position. + + Raises: + InvalidHeaderFormat: on invalid inputs. 
+ + """ + item, pos = parse_token(header, pos, header_name) + return cast(ConnectionOption, item), pos + + +def parse_connection(header: str) -> List[ConnectionOption]: + """ + Parse a ``Connection`` header. + + Return a list of HTTP connection options. + + Args + header: value of the ``Connection`` header. + + Raises: + InvalidHeaderFormat: on invalid inputs. + + """ + return parse_list(parse_connection_option, header, 0, "Connection") + + +_protocol_re = re.compile( + r"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+(?:/[-!#$%&\'*+.^_`|~0-9a-zA-Z]+)?" +) + + +def parse_upgrade_protocol( + header: str, pos: int, header_name: str +) -> Tuple[UpgradeProtocol, int]: + """ + Parse an Upgrade protocol from ``header`` at the given position. + + Return the protocol value and the new position. + + Raises: + InvalidHeaderFormat: on invalid inputs. + + """ + match = _protocol_re.match(header, pos) + if match is None: + raise exceptions.InvalidHeaderFormat( + header_name, "expected protocol", header, pos + ) + return cast(UpgradeProtocol, match.group()), match.end() + + +def parse_upgrade(header: str) -> List[UpgradeProtocol]: + """ + Parse an ``Upgrade`` header. + + Return a list of HTTP protocols. + + Args: + header: value of the ``Upgrade`` header. + + Raises: + InvalidHeaderFormat: on invalid inputs. + + """ + return parse_list(parse_upgrade_protocol, header, 0, "Upgrade") + + +def parse_extension_item_param( + header: str, pos: int, header_name: str +) -> Tuple[ExtensionParameter, int]: + """ + Parse a single extension parameter from ``header`` at the given position. + + Return a ``(name, value)`` pair and the new position. + + Raises: + InvalidHeaderFormat: on invalid inputs. + + """ + # Extract parameter name. + name, pos = parse_token(header, pos, header_name) + pos = parse_OWS(header, pos) + # Extract parameter value, if there is one. + value: Optional[str] = None + if peek_ahead(header, pos) == "=": + pos = parse_OWS(header, pos + 1) + if peek_ahead(header, pos) == '"': + pos_before = pos # for proper error reporting below + value, pos = parse_quoted_string(header, pos, header_name) + # https://www.rfc-editor.org/rfc/rfc6455.html#section-9.1 says: + # the value after quoted-string unescaping MUST conform to + # the 'token' ABNF. + if _token_re.fullmatch(value) is None: + raise exceptions.InvalidHeaderFormat( + header_name, "invalid quoted header content", header, pos_before + ) + else: + value, pos = parse_token(header, pos, header_name) + pos = parse_OWS(header, pos) + + return (name, value), pos + + +def parse_extension_item( + header: str, pos: int, header_name: str +) -> Tuple[ExtensionHeader, int]: + """ + Parse an extension definition from ``header`` at the given position. + + Return an ``(extension name, parameters)`` pair, where ``parameters`` is a + list of ``(name, value)`` pairs, and the new position. + + Raises: + InvalidHeaderFormat: on invalid inputs. + + """ + # Extract extension name. + name, pos = parse_token(header, pos, header_name) + pos = parse_OWS(header, pos) + # Extract all parameters. + parameters = [] + while peek_ahead(header, pos) == ";": + pos = parse_OWS(header, pos + 1) + parameter, pos = parse_extension_item_param(header, pos, header_name) + parameters.append(parameter) + return (cast(ExtensionName, name), parameters), pos + + +def parse_extension(header: str) -> List[ExtensionHeader]: + """ + Parse a ``Sec-WebSocket-Extensions`` header. 
+ + Return a list of WebSocket extensions and their parameters in this format:: + + [ + ( + 'extension name', + [ + ('parameter name', 'parameter value'), + .... + ] + ), + ... + ] + + Parameter values are :obj:`None` when no value is provided. + + Raises: + InvalidHeaderFormat: on invalid inputs. + + """ + return parse_list(parse_extension_item, header, 0, "Sec-WebSocket-Extensions") + + +parse_extension_list = parse_extension # alias for backwards compatibility + + +def build_extension_item( + name: ExtensionName, parameters: List[ExtensionParameter] +) -> str: + """ + Build an extension definition. + + This is the reverse of :func:`parse_extension_item`. + + """ + return "; ".join( + [cast(str, name)] + + [ + # Quoted strings aren't necessary because values are always tokens. + name if value is None else f"{name}={value}" + for name, value in parameters + ] + ) + + +def build_extension(extensions: Sequence[ExtensionHeader]) -> str: + """ + Build a ``Sec-WebSocket-Extensions`` header. + + This is the reverse of :func:`parse_extension`. + + """ + return ", ".join( + build_extension_item(name, parameters) for name, parameters in extensions + ) + + +build_extension_list = build_extension # alias for backwards compatibility + + +def parse_subprotocol_item( + header: str, pos: int, header_name: str +) -> Tuple[Subprotocol, int]: + """ + Parse a subprotocol from ``header`` at the given position. + + Return the subprotocol value and the new position. + + Raises: + InvalidHeaderFormat: on invalid inputs. + + """ + item, pos = parse_token(header, pos, header_name) + return cast(Subprotocol, item), pos + + +def parse_subprotocol(header: str) -> List[Subprotocol]: + """ + Parse a ``Sec-WebSocket-Protocol`` header. + + Return a list of WebSocket subprotocols. + + Raises: + InvalidHeaderFormat: on invalid inputs. + + """ + return parse_list(parse_subprotocol_item, header, 0, "Sec-WebSocket-Protocol") + + +parse_subprotocol_list = parse_subprotocol # alias for backwards compatibility + + +def build_subprotocol(subprotocols: Sequence[Subprotocol]) -> str: + """ + Build a ``Sec-WebSocket-Protocol`` header. + + This is the reverse of :func:`parse_subprotocol`. + + """ + return ", ".join(subprotocols) + + +build_subprotocol_list = build_subprotocol # alias for backwards compatibility + + +def validate_subprotocols(subprotocols: Sequence[Subprotocol]) -> None: + """ + Validate that ``subprotocols`` is suitable for :func:`build_subprotocol`. + + """ + if not isinstance(subprotocols, Sequence): + raise TypeError("subprotocols must be a list") + if isinstance(subprotocols, str): + raise TypeError("subprotocols must be a list, not a str") + for subprotocol in subprotocols: + if not _token_re.fullmatch(subprotocol): + raise ValueError(f"invalid subprotocol: {subprotocol}") + + +def build_www_authenticate_basic(realm: str) -> str: + """ + Build a ``WWW-Authenticate`` header for HTTP Basic Auth. + + Args: + realm: identifier of the protection space. + + """ + # https://www.rfc-editor.org/rfc/rfc7617.html#section-2 + realm = build_quoted_string(realm) + charset = build_quoted_string("UTF-8") + return f"Basic realm={realm}, charset={charset}" + + +_token68_re = re.compile(r"[A-Za-z0-9-._~+/]+=*") + + +def parse_token68(header: str, pos: int, header_name: str) -> Tuple[str, int]: + """ + Parse a token68 from ``header`` at the given position. + + Return the token value and the new position. + + Raises: + InvalidHeaderFormat: on invalid inputs. 
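+
+    Example (illustrative)::
+
+        >>> parse_token68("dXNlcjpwYXNz", 0, "Authorization")
+        ('dXNlcjpwYXNz', 12)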
+ + """ + match = _token68_re.match(header, pos) + if match is None: + raise exceptions.InvalidHeaderFormat( + header_name, "expected token68", header, pos + ) + return match.group(), match.end() + + +def parse_end(header: str, pos: int, header_name: str) -> None: + """ + Check that parsing reached the end of header. + + """ + if pos < len(header): + raise exceptions.InvalidHeaderFormat(header_name, "trailing data", header, pos) + + +def parse_authorization_basic(header: str) -> Tuple[str, str]: + """ + Parse an ``Authorization`` header for HTTP Basic Auth. + + Return a ``(username, password)`` tuple. + + Args: + header: value of the ``Authorization`` header. + + Raises: + InvalidHeaderFormat: on invalid inputs. + InvalidHeaderValue: on unsupported inputs. + + """ + # https://www.rfc-editor.org/rfc/rfc7235.html#section-2.1 + # https://www.rfc-editor.org/rfc/rfc7617.html#section-2 + scheme, pos = parse_token(header, 0, "Authorization") + if scheme.lower() != "basic": + raise exceptions.InvalidHeaderValue( + "Authorization", + f"unsupported scheme: {scheme}", + ) + if peek_ahead(header, pos) != " ": + raise exceptions.InvalidHeaderFormat( + "Authorization", "expected space after scheme", header, pos + ) + pos += 1 + basic_credentials, pos = parse_token68(header, pos, "Authorization") + parse_end(header, pos, "Authorization") + + try: + user_pass = base64.b64decode(basic_credentials.encode()).decode() + except binascii.Error: + raise exceptions.InvalidHeaderValue( + "Authorization", + "expected base64-encoded credentials", + ) from None + try: + username, password = user_pass.split(":", 1) + except ValueError: + raise exceptions.InvalidHeaderValue( + "Authorization", + "expected username:password credentials", + ) from None + + return username, password + + +def build_authorization_basic(username: str, password: str) -> str: + """ + Build an ``Authorization`` header for HTTP Basic Auth. + + This is the reverse of :func:`parse_authorization_basic`. + + """ + # https://www.rfc-editor.org/rfc/rfc7617.html#section-2 + assert ":" not in username + user_pass = f"{username}:{password}" + basic_credentials = base64.b64encode(user_pass.encode()).decode() + return "Basic " + basic_credentials diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/http.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/http.py new file mode 100644 index 00000000..b14fa94b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/http.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +import sys + +from .imports import lazy_import +from .version import version as websockets_version + + +# For backwards compatibility: + + +lazy_import( + globals(), + # Headers and MultipleValuesError used to be defined in this module. 
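+    # Accessing one of these names, e.g. websockets.http.Headers, invokes
+    # the module-level __getattr__ installed by lazy_import, which imports
+    # the name from the aliased module on first use.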
+    aliases={
+        "Headers": ".datastructures",
+        "MultipleValuesError": ".datastructures",
+    },
+    deprecated_aliases={
+        "read_request": ".legacy.http",
+        "read_response": ".legacy.http",
+    },
+)
+
+
+__all__ = ["USER_AGENT"]
+
+
+PYTHON_VERSION = "{}.{}".format(*sys.version_info)
+USER_AGENT = f"Python/{PYTHON_VERSION} websockets/{websockets_version}"
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/http11.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/http11.py
new file mode 100644
index 00000000..84048fa4
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/http11.py
@@ -0,0 +1,366 @@
+from __future__ import annotations
+
+import dataclasses
+import re
+import warnings
+from typing import Callable, Generator, Optional
+
+from . import datastructures, exceptions
+
+
+# Maximum total size of headers is around 256 * 4 KiB = 1 MiB
+MAX_HEADERS = 256
+
+# We can use the same limit for the request line and header lines:
+# "GET <4096 bytes> HTTP/1.1\r\n" = 4111 bytes
+# "Set-Cookie: <4097 bytes>\r\n" = 4111 bytes
+# (RFC requires 4096 bytes; for some reason Firefox supports 4097 bytes.)
+MAX_LINE = 4111
+
+# Support for HTTP response bodies is intended to read an error message
+# returned by a server. It isn't designed to perform large file transfers.
+MAX_BODY = 2**20  # 1 MiB
+
+
+def d(value: bytes) -> str:
+    """
+    Decode a bytestring for interpolating into an error message.
+
+    """
+    return value.decode(errors="backslashreplace")
+
+
+# See https://www.rfc-editor.org/rfc/rfc7230.html#appendix-B.
+
+# Regex for validating header names.
+
+_token_re = re.compile(rb"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+")
+
+# Regex for validating header values.
+
+# We don't attempt to support obsolete line folding.
+
+# Include HTAB (\x09), SP (\x20), VCHAR (\x21-\x7e), obs-text (\x80-\xff).
+
+# The ABNF is complicated because it attempts to express that optional
+# whitespace is ignored. We strip whitespace and don't revalidate that.
+
+# See also https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189
+
+_value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*")
+
+
+@dataclasses.dataclass
+class Request:
+    """
+    WebSocket handshake request.
+
+    Attributes:
+        path: Request path, including optional query.
+        headers: Request headers.
+    """
+
+    path: str
+    headers: datastructures.Headers
+    # body isn't useful in the context of this library.
+
+    _exception: Optional[Exception] = None
+
+    @property
+    def exception(self) -> Optional[Exception]:  # pragma: no cover
+        warnings.warn(
+            "Request.exception is deprecated; "
+            "use ServerConnection.handshake_exc instead",
+            DeprecationWarning,
+        )
+        return self._exception
+
+    @classmethod
+    def parse(
+        cls,
+        read_line: Callable[[int], Generator[None, None, bytes]],
+    ) -> Generator[None, None, Request]:
+        """
+        Parse a WebSocket handshake request.
+
+        This is a generator-based coroutine.
+
+        The request path isn't URL-decoded or validated in any way.
+
+        The request path and headers are expected to contain only ASCII
+        characters. Other characters are represented with surrogate escapes.
+
+        :meth:`parse` doesn't attempt to read the request body because
+        WebSocket handshake requests don't have one. If the request contains a
+        body, it may be read from the data stream after :meth:`parse` returns.
+ + Args: + read_line: generator-based coroutine that reads a LF-terminated + line or raises an exception if there isn't enough data + + Raises: + EOFError: if the connection is closed without a full HTTP request. + SecurityError: if the request exceeds a security limit. + ValueError: if the request isn't well formatted. + + """ + # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.1.1 + + # Parsing is simple because fixed values are expected for method and + # version and because path isn't checked. Since WebSocket software tends + # to implement HTTP/1.1 strictly, there's little need for lenient parsing. + + try: + request_line = yield from parse_line(read_line) + except EOFError as exc: + raise EOFError("connection closed while reading HTTP request line") from exc + + try: + method, raw_path, version = request_line.split(b" ", 2) + except ValueError: # not enough values to unpack (expected 3, got 1-2) + raise ValueError(f"invalid HTTP request line: {d(request_line)}") from None + + if method != b"GET": + raise ValueError(f"unsupported HTTP method: {d(method)}") + if version != b"HTTP/1.1": + raise ValueError(f"unsupported HTTP version: {d(version)}") + path = raw_path.decode("ascii", "surrogateescape") + + headers = yield from parse_headers(read_line) + + # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.3.3 + + if "Transfer-Encoding" in headers: + raise NotImplementedError("transfer codings aren't supported") + + if "Content-Length" in headers: + raise ValueError("unsupported request body") + + return cls(path, headers) + + def serialize(self) -> bytes: + """ + Serialize a WebSocket handshake request. + + """ + # Since the request line and headers only contain ASCII characters, + # we can keep this simple. + request = f"GET {self.path} HTTP/1.1\r\n".encode() + request += self.headers.serialize() + return request + + +@dataclasses.dataclass +class Response: + """ + WebSocket handshake response. + + Attributes: + status_code: Response code. + reason_phrase: Response reason. + headers: Response headers. + body: Response body, if any. + + """ + + status_code: int + reason_phrase: str + headers: datastructures.Headers + body: Optional[bytes] = None + + _exception: Optional[Exception] = None + + @property + def exception(self) -> Optional[Exception]: # pragma: no cover + warnings.warn( + "Response.exception is deprecated; " + "use ClientConnection.handshake_exc instead", + DeprecationWarning, + ) + return self._exception + + @classmethod + def parse( + cls, + read_line: Callable[[int], Generator[None, None, bytes]], + read_exact: Callable[[int], Generator[None, None, bytes]], + read_to_eof: Callable[[int], Generator[None, None, bytes]], + ) -> Generator[None, None, Response]: + """ + Parse a WebSocket handshake response. + + This is a generator-based coroutine. + + The reason phrase and headers are expected to contain only ASCII + characters. Other characters are represented with surrogate escapes. + + Args: + read_line: generator-based coroutine that reads a LF-terminated + line or raises an exception if there isn't enough data. + read_exact: generator-based coroutine that reads the requested + bytes or raises an exception if there isn't enough data. + read_to_eof: generator-based coroutine that reads until the end + of the stream. + + Raises: + EOFError: if the connection is closed without a full HTTP response. + SecurityError: if the response exceeds a security limit. + LookupError: if the response isn't well formatted. + ValueError: if the response isn't well formatted. 
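+
+        A sketch of typical use from another generator-based coroutine
+        (illustrative; the ``read_*`` callbacks come from the I/O layer)::
+
+            response = yield from Response.parse(
+                read_line, read_exact, read_to_eof
+            )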
+ + """ + # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.1.2 + + try: + status_line = yield from parse_line(read_line) + except EOFError as exc: + raise EOFError("connection closed while reading HTTP status line") from exc + + try: + version, raw_status_code, raw_reason = status_line.split(b" ", 2) + except ValueError: # not enough values to unpack (expected 3, got 1-2) + raise ValueError(f"invalid HTTP status line: {d(status_line)}") from None + + if version != b"HTTP/1.1": + raise ValueError(f"unsupported HTTP version: {d(version)}") + try: + status_code = int(raw_status_code) + except ValueError: # invalid literal for int() with base 10 + raise ValueError( + f"invalid HTTP status code: {d(raw_status_code)}" + ) from None + if not 100 <= status_code < 1000: + raise ValueError(f"unsupported HTTP status code: {d(raw_status_code)}") + if not _value_re.fullmatch(raw_reason): + raise ValueError(f"invalid HTTP reason phrase: {d(raw_reason)}") + reason = raw_reason.decode() + + headers = yield from parse_headers(read_line) + + # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.3.3 + + if "Transfer-Encoding" in headers: + raise NotImplementedError("transfer codings aren't supported") + + # Since websockets only does GET requests (no HEAD, no CONNECT), all + # responses except 1xx, 204, and 304 include a message body. + if 100 <= status_code < 200 or status_code == 204 or status_code == 304: + body = None + else: + content_length: Optional[int] + try: + # MultipleValuesError is sufficiently unlikely that we don't + # attempt to handle it. Instead we document that its parent + # class, LookupError, may be raised. + raw_content_length = headers["Content-Length"] + except KeyError: + content_length = None + else: + content_length = int(raw_content_length) + + if content_length is None: + try: + body = yield from read_to_eof(MAX_BODY) + except RuntimeError: + raise exceptions.SecurityError( + f"body too large: over {MAX_BODY} bytes" + ) + elif content_length > MAX_BODY: + raise exceptions.SecurityError( + f"body too large: {content_length} bytes" + ) + else: + body = yield from read_exact(content_length) + + return cls(status_code, reason, headers, body) + + def serialize(self) -> bytes: + """ + Serialize a WebSocket handshake response. + + """ + # Since the status line and headers only contain ASCII characters, + # we can keep this simple. + response = f"HTTP/1.1 {self.status_code} {self.reason_phrase}\r\n".encode() + response += self.headers.serialize() + if self.body is not None: + response += self.body + return response + + +def parse_headers( + read_line: Callable[[int], Generator[None, None, bytes]], +) -> Generator[None, None, datastructures.Headers]: + """ + Parse HTTP headers. + + Non-ASCII characters are represented with surrogate escapes. + + Args: + read_line: generator-based coroutine that reads a LF-terminated line + or raises an exception if there isn't enough data. + + Raises: + EOFError: if the connection is closed without complete headers. + SecurityError: if the request exceeds a security limit. + ValueError: if the request isn't well formatted. + + """ + # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.2 + + # We don't attempt to support obsolete line folding. 
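+
+    # For example, the line b"Sec-WebSocket-Version: 13\r\n" yields the name
+    # "Sec-WebSocket-Version" and the value "13" once optional whitespace
+    # around the value is stripped.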
+
+    headers = datastructures.Headers()
+    for _ in range(MAX_HEADERS + 1):
+        try:
+            line = yield from parse_line(read_line)
+        except EOFError as exc:
+            raise EOFError("connection closed while reading HTTP headers") from exc
+        if line == b"":
+            break
+
+        try:
+            raw_name, raw_value = line.split(b":", 1)
+        except ValueError:  # not enough values to unpack (expected 2, got 1)
+            raise ValueError(f"invalid HTTP header line: {d(line)}") from None
+        if not _token_re.fullmatch(raw_name):
+            raise ValueError(f"invalid HTTP header name: {d(raw_name)}")
+        raw_value = raw_value.strip(b" \t")
+        if not _value_re.fullmatch(raw_value):
+            raise ValueError(f"invalid HTTP header value: {d(raw_value)}")
+
+        name = raw_name.decode("ascii")  # guaranteed to be ASCII at this point
+        value = raw_value.decode("ascii", "surrogateescape")
+        headers[name] = value
+
+    else:
+        raise exceptions.SecurityError("too many HTTP headers")
+
+    return headers
+
+
+def parse_line(
+    read_line: Callable[[int], Generator[None, None, bytes]],
+) -> Generator[None, None, bytes]:
+    """
+    Parse a single line.
+
+    CRLF is stripped from the return value.
+
+    Args:
+        read_line: generator-based coroutine that reads a LF-terminated line
+            or raises an exception if there isn't enough data.
+
+    Raises:
+        EOFError: if the connection is closed without a CRLF.
+        SecurityError: if the response exceeds a security limit.
+
+    """
+    try:
+        line = yield from read_line(MAX_LINE)
+    except RuntimeError:
+        raise exceptions.SecurityError("line too long")
+    # Not mandatory but safe - https://www.rfc-editor.org/rfc/rfc7230.html#section-3.5
+    if not line.endswith(b"\r\n"):
+        raise EOFError("line without CRLF")
+    return line[:-2]
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/imports.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/imports.py
new file mode 100644
index 00000000..a6a59d4c
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/imports.py
@@ -0,0 +1,99 @@
+from __future__ import annotations
+
+import warnings
+from typing import Any, Dict, Iterable, Optional
+
+
+__all__ = ["lazy_import"]
+
+
+def import_name(name: str, source: str, namespace: Dict[str, Any]) -> Any:
+    """
+    Import ``name`` from ``source`` in ``namespace``.
+
+    There are two use cases:
+
+    - ``name`` is an object defined in ``source``;
+    - ``name`` is a submodule of ``source``.
+
+    Neither :func:`__import__` nor :func:`~importlib.import_module` does
+    exactly this. :func:`__import__` is closer to the intended behavior.
+
+    """
+    level = 0
+    while source[level] == ".":
+        level += 1
+        assert level < len(source), "importing from parent isn't supported"
+    module = __import__(source[level:], namespace, None, [name], level)
+    return getattr(module, name)
+
+
+def lazy_import(
+    namespace: Dict[str, Any],
+    aliases: Optional[Dict[str, str]] = None,
+    deprecated_aliases: Optional[Dict[str, str]] = None,
+) -> None:
+    """
+    Provide lazy, module-level imports.
+
+    Typical use::
+
+        __getattr__, __dir__ = lazy_import(
+            globals(),
+            aliases={
+                "<name>": "<source module>",
+                ...
+            },
+            deprecated_aliases={
+                ...,
+            }
+        )
+
+    This function defines ``__getattr__`` and ``__dir__`` per :pep:`562`.
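+
+    Example (illustrative)::
+
+        lazy_import(globals(), aliases={"Headers": ".datastructures"})
+        # Accessing Headers on the calling module now triggers
+        # an import of .datastructures.Headers on first use.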
+ + """ + if aliases is None: + aliases = {} + if deprecated_aliases is None: + deprecated_aliases = {} + + namespace_set = set(namespace) + aliases_set = set(aliases) + deprecated_aliases_set = set(deprecated_aliases) + + assert not namespace_set & aliases_set, "namespace conflict" + assert not namespace_set & deprecated_aliases_set, "namespace conflict" + assert not aliases_set & deprecated_aliases_set, "namespace conflict" + + package = namespace["__name__"] + + def __getattr__(name: str) -> Any: + assert aliases is not None # mypy cannot figure this out + try: + source = aliases[name] + except KeyError: + pass + else: + return import_name(name, source, namespace) + + assert deprecated_aliases is not None # mypy cannot figure this out + try: + source = deprecated_aliases[name] + except KeyError: + pass + else: + warnings.warn( + f"{package}.{name} is deprecated", + DeprecationWarning, + stacklevel=2, + ) + return import_name(name, source, namespace) + + raise AttributeError(f"module {package!r} has no attribute {name!r}") + + namespace["__getattr__"] = __getattr__ + + def __dir__() -> Iterable[str]: + return sorted(namespace_set | aliases_set | deprecated_aliases_set) + + namespace["__dir__"] = __dir__ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/__init__.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/auth.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/auth.py new file mode 100644 index 00000000..8825c14e --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/auth.py @@ -0,0 +1,188 @@ +from __future__ import annotations + +import functools +import hmac +import http +from typing import Any, Awaitable, Callable, Iterable, Optional, Tuple, Union, cast + +from ..datastructures import Headers +from ..exceptions import InvalidHeader +from ..headers import build_www_authenticate_basic, parse_authorization_basic +from .server import HTTPResponse, WebSocketServerProtocol + + +__all__ = ["BasicAuthWebSocketServerProtocol", "basic_auth_protocol_factory"] + +Credentials = Tuple[str, str] + + +def is_credentials(value: Any) -> bool: + try: + username, password = value + except (TypeError, ValueError): + return False + else: + return isinstance(username, str) and isinstance(password, str) + + +class BasicAuthWebSocketServerProtocol(WebSocketServerProtocol): + """ + WebSocket server protocol that enforces HTTP Basic Auth. + + """ + + realm: str = "" + """ + Scope of protection. + + If provided, it should contain only ASCII characters because the + encoding of non-ASCII characters is undefined. + """ + + username: Optional[str] = None + """Username of the authenticated user.""" + + def __init__( + self, + *args: Any, + realm: Optional[str] = None, + check_credentials: Optional[Callable[[str, str], Awaitable[bool]]] = None, + **kwargs: Any, + ) -> None: + if realm is not None: + self.realm = realm # shadow class attribute + self._check_credentials = check_credentials + super().__init__(*args, **kwargs) + + async def check_credentials(self, username: str, password: str) -> bool: + """ + Check whether credentials are authorized. + + This coroutine may be overridden in a subclass, for example to + authenticate against a database or an external service. + + Args: + username: HTTP Basic Auth username. 
+            password: HTTP Basic Auth password.
+
+        Returns:
+            bool: :obj:`True` if the handshake should continue;
+                :obj:`False` if it should fail with an HTTP 401 error.
+
+        """
+        if self._check_credentials is not None:
+            return await self._check_credentials(username, password)
+
+        return False
+
+    async def process_request(
+        self,
+        path: str,
+        request_headers: Headers,
+    ) -> Optional[HTTPResponse]:
+        """
+        Check HTTP Basic Auth and return an HTTP 401 response if needed.
+
+        """
+        try:
+            authorization = request_headers["Authorization"]
+        except KeyError:
+            return (
+                http.HTTPStatus.UNAUTHORIZED,
+                [("WWW-Authenticate", build_www_authenticate_basic(self.realm))],
+                b"Missing credentials\n",
+            )
+
+        try:
+            username, password = parse_authorization_basic(authorization)
+        except InvalidHeader:
+            return (
+                http.HTTPStatus.UNAUTHORIZED,
+                [("WWW-Authenticate", build_www_authenticate_basic(self.realm))],
+                b"Unsupported credentials\n",
+            )
+
+        if not await self.check_credentials(username, password):
+            return (
+                http.HTTPStatus.UNAUTHORIZED,
+                [("WWW-Authenticate", build_www_authenticate_basic(self.realm))],
+                b"Invalid credentials\n",
+            )
+
+        self.username = username
+
+        return await super().process_request(path, request_headers)
+
+
+def basic_auth_protocol_factory(
+    realm: Optional[str] = None,
+    credentials: Optional[Union[Credentials, Iterable[Credentials]]] = None,
+    check_credentials: Optional[Callable[[str, str], Awaitable[bool]]] = None,
+    create_protocol: Optional[Callable[[Any], BasicAuthWebSocketServerProtocol]] = None,
+) -> Callable[[Any], BasicAuthWebSocketServerProtocol]:
+    """
+    Protocol factory that enforces HTTP Basic Auth.
+
+    :func:`basic_auth_protocol_factory` is designed to integrate with
+    :func:`~websockets.server.serve` like this::
+
+        websockets.serve(
+            ...,
+            create_protocol=websockets.basic_auth_protocol_factory(
+                realm="my dev server",
+                credentials=("hello", "iloveyou"),
+            )
+        )
+
+    Args:
+        realm: indicates the scope of protection. It should contain only ASCII
+            characters because the encoding of non-ASCII characters is
+            undefined. Refer to section 2.2 of :rfc:`7235` for details.
+        credentials: defines hard coded authorized credentials. It can be a
+            ``(username, password)`` pair or a list of such pairs.
+        check_credentials: defines a coroutine that verifies credentials.
+            This coroutine receives ``username`` and ``password`` arguments
+            and returns a :class:`bool`. One of ``credentials`` or
+            ``check_credentials`` must be provided but not both.
+        create_protocol: factory that creates the protocol. By default, this
+            is :class:`BasicAuthWebSocketServerProtocol`. It can be replaced
+            by a subclass.
+
+    Raises:
+        TypeError: if the ``credentials`` or ``check_credentials`` argument is
+            wrong.
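+
+    Example with a credential checker instead of fixed credentials
+    (illustrative; ``user_database`` is a hypothetical mapping)::
+
+        import hmac
+
+        async def check_credentials(username, password):
+            expected = user_database.get(username, "")
+            return hmac.compare_digest(expected, password)
+
+        websockets.serve(
+            ...,
+            create_protocol=websockets.basic_auth_protocol_factory(
+                realm="my dev server",
+                check_credentials=check_credentials,
+            )
+        )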
+ + """ + if (credentials is None) == (check_credentials is None): + raise TypeError("provide either credentials or check_credentials") + + if credentials is not None: + if is_credentials(credentials): + credentials_list = [cast(Credentials, credentials)] + elif isinstance(credentials, Iterable): + credentials_list = list(credentials) + if not all(is_credentials(item) for item in credentials_list): + raise TypeError(f"invalid credentials argument: {credentials}") + else: + raise TypeError(f"invalid credentials argument: {credentials}") + + credentials_dict = dict(credentials_list) + + async def check_credentials(username: str, password: str) -> bool: + try: + expected_password = credentials_dict[username] + except KeyError: + return False + return hmac.compare_digest(expected_password, password) + + if create_protocol is None: + # Not sure why mypy cannot figure this out. + create_protocol = cast( + Callable[[Any], BasicAuthWebSocketServerProtocol], + BasicAuthWebSocketServerProtocol, + ) + + return functools.partial( + create_protocol, + realm=realm, + check_credentials=check_credentials, + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/client.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/client.py new file mode 100644 index 00000000..fadc3efe --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/client.py @@ -0,0 +1,709 @@ +from __future__ import annotations + +import asyncio +import functools +import logging +import random +import urllib.parse +import warnings +from types import TracebackType +from typing import ( + Any, + AsyncIterator, + Callable, + Generator, + List, + Optional, + Sequence, + Tuple, + Type, + cast, +) + +from ..datastructures import Headers, HeadersLike +from ..exceptions import ( + InvalidHandshake, + InvalidHeader, + InvalidMessage, + InvalidStatusCode, + NegotiationError, + RedirectHandshake, + SecurityError, +) +from ..extensions import ClientExtensionFactory, Extension +from ..extensions.permessage_deflate import enable_client_permessage_deflate +from ..headers import ( + build_authorization_basic, + build_extension, + build_host, + build_subprotocol, + parse_extension, + parse_subprotocol, + validate_subprotocols, +) +from ..http import USER_AGENT +from ..typing import ExtensionHeader, LoggerLike, Origin, Subprotocol +from ..uri import WebSocketURI, parse_uri +from .handshake import build_request, check_response +from .http import read_response +from .protocol import WebSocketCommonProtocol + + +__all__ = ["connect", "unix_connect", "WebSocketClientProtocol"] + + +class WebSocketClientProtocol(WebSocketCommonProtocol): + """ + WebSocket client connection. + + :class:`WebSocketClientProtocol` provides :meth:`recv` and :meth:`send` + coroutines for receiving and sending messages. + + It supports asynchronous iteration to receive incoming messages:: + + async for message in websocket: + await process(message) + + The iterator exits normally when the connection is closed with close code + 1000 (OK) or 1001 (going away). It raises + a :exc:`~websockets.exceptions.ConnectionClosedError` when the connection + is closed with any other code. + + See :func:`connect` for the documentation of ``logger``, ``origin``, + ``extensions``, ``subprotocols``, and ``extra_headers``. 
+
+    See :class:`~websockets.legacy.protocol.WebSocketCommonProtocol` for the
+    documentation of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
+    ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit``.
+
+    """
+
+    is_client = True
+    side = "client"
+
+    def __init__(
+        self,
+        *,
+        logger: Optional[LoggerLike] = None,
+        origin: Optional[Origin] = None,
+        extensions: Optional[Sequence[ClientExtensionFactory]] = None,
+        subprotocols: Optional[Sequence[Subprotocol]] = None,
+        extra_headers: Optional[HeadersLike] = None,
+        **kwargs: Any,
+    ) -> None:
+        if logger is None:
+            logger = logging.getLogger("websockets.client")
+        super().__init__(logger=logger, **kwargs)
+        self.origin = origin
+        self.available_extensions = extensions
+        self.available_subprotocols = subprotocols
+        self.extra_headers = extra_headers
+
+    def write_http_request(self, path: str, headers: Headers) -> None:
+        """
+        Write request line and headers to the HTTP request.
+
+        """
+        self.path = path
+        self.request_headers = headers
+
+        if self.debug:
+            self.logger.debug("> GET %s HTTP/1.1", path)
+            for key, value in headers.raw_items():
+                self.logger.debug("> %s: %s", key, value)
+
+        # Since the path and headers only contain ASCII characters,
+        # we can keep this simple.
+        request = f"GET {path} HTTP/1.1\r\n"
+        request += str(headers)
+
+        self.transport.write(request.encode())
+
+    async def read_http_response(self) -> Tuple[int, Headers]:
+        """
+        Read status line and headers from the HTTP response.
+
+        If the response contains a body, it may be read from ``self.reader``
+        after this coroutine returns.
+
+        Raises:
+            InvalidMessage: if the HTTP message is malformed or isn't an
+                HTTP/1.1 GET response.
+
+        """
+        try:
+            status_code, reason, headers = await read_response(self.reader)
+        # Remove this branch when dropping support for Python < 3.8
+        # because CancelledError no longer inherits Exception.
+        except asyncio.CancelledError:  # pragma: no cover
+            raise
+        except Exception as exc:
+            raise InvalidMessage("did not receive a valid HTTP response") from exc
+
+        if self.debug:
+            self.logger.debug("< HTTP/1.1 %d %s", status_code, reason)
+            for key, value in headers.raw_items():
+                self.logger.debug("< %s: %s", key, value)
+
+        self.response_headers = headers
+
+        return status_code, self.response_headers
+
+    @staticmethod
+    def process_extensions(
+        headers: Headers,
+        available_extensions: Optional[Sequence[ClientExtensionFactory]],
+    ) -> List[Extension]:
+        """
+        Handle the Sec-WebSocket-Extensions HTTP response header.
+
+        Check that each extension is supported, as well as its parameters.
+
+        Return the list of accepted extensions.
+
+        Raise :exc:`~websockets.exceptions.InvalidHandshake` to abort the
+        connection.
+
+        :rfc:`6455` leaves the rules up to the specification of each
+        extension.
+
+        To provide this level of flexibility, for each extension accepted by
+        the server, we check for a match with each extension available in the
+        client configuration. If no match is found, an exception is raised.
+
+        If several variants of the same extension are accepted by the server,
+        it may be configured several times, which won't make sense in general.
+        Extensions must implement their own requirements. For this purpose,
+        the list of previously accepted extensions is provided.
+
+        Other requirements, for example related to mandatory extensions or the
+        order of extensions, may be implemented by overriding this method.
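+
+        For example (illustrative, assuming ``ClientPerMessageDeflateFactory``
+        from :mod:`websockets.extensions.permessage_deflate`)::
+
+            headers = Headers()
+            headers["Sec-WebSocket-Extensions"] = "permessage-deflate"
+            factories = [ClientPerMessageDeflateFactory()]
+            # Returns a list with one configured PerMessageDeflate extension.
+            extensions = WebSocketClientProtocol.process_extensions(headers, factories)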
+ + """ + accepted_extensions: List[Extension] = [] + + header_values = headers.get_all("Sec-WebSocket-Extensions") + + if header_values: + + if available_extensions is None: + raise InvalidHandshake("no extensions supported") + + parsed_header_values: List[ExtensionHeader] = sum( + [parse_extension(header_value) for header_value in header_values], [] + ) + + for name, response_params in parsed_header_values: + + for extension_factory in available_extensions: + + # Skip non-matching extensions based on their name. + if extension_factory.name != name: + continue + + # Skip non-matching extensions based on their params. + try: + extension = extension_factory.process_response_params( + response_params, accepted_extensions + ) + except NegotiationError: + continue + + # Add matching extension to the final list. + accepted_extensions.append(extension) + + # Break out of the loop once we have a match. + break + + # If we didn't break from the loop, no extension in our list + # matched what the server sent. Fail the connection. + else: + raise NegotiationError( + f"Unsupported extension: " + f"name = {name}, params = {response_params}" + ) + + return accepted_extensions + + @staticmethod + def process_subprotocol( + headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]] + ) -> Optional[Subprotocol]: + """ + Handle the Sec-WebSocket-Protocol HTTP response header. + + Check that it contains exactly one supported subprotocol. + + Return the selected subprotocol. + + """ + subprotocol: Optional[Subprotocol] = None + + header_values = headers.get_all("Sec-WebSocket-Protocol") + + if header_values: + + if available_subprotocols is None: + raise InvalidHandshake("no subprotocols supported") + + parsed_header_values: Sequence[Subprotocol] = sum( + [parse_subprotocol(header_value) for header_value in header_values], [] + ) + + if len(parsed_header_values) > 1: + subprotocols = ", ".join(parsed_header_values) + raise InvalidHandshake(f"multiple subprotocols: {subprotocols}") + + subprotocol = parsed_header_values[0] + + if subprotocol not in available_subprotocols: + raise NegotiationError(f"unsupported subprotocol: {subprotocol}") + + return subprotocol + + async def handshake( + self, + wsuri: WebSocketURI, + origin: Optional[Origin] = None, + available_extensions: Optional[Sequence[ClientExtensionFactory]] = None, + available_subprotocols: Optional[Sequence[Subprotocol]] = None, + extra_headers: Optional[HeadersLike] = None, + ) -> None: + """ + Perform the client side of the opening handshake. + + Args: + wsuri: URI of the WebSocket server. + origin: value of the ``Origin`` header. + available_extensions: list of supported extensions, in order in + which they should be tried. + available_subprotocols: list of supported subprotocols, in order + of decreasing preference. + extra_headers: arbitrary HTTP headers to add to the request. + + Raises: + InvalidHandshake: if the handshake fails. 
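+
+        An abridged example of the request built here (illustrative; the key
+        is random)::
+
+            GET /chat HTTP/1.1
+            Host: server.example.com
+            Upgrade: websocket
+            Connection: Upgrade
+            Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==
+            Sec-WebSocket-Version: 13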
+
+        """
+        request_headers = Headers()
+
+        request_headers["Host"] = build_host(wsuri.host, wsuri.port, wsuri.secure)
+
+        if wsuri.user_info:
+            request_headers["Authorization"] = build_authorization_basic(
+                *wsuri.user_info
+            )
+
+        if origin is not None:
+            request_headers["Origin"] = origin
+
+        key = build_request(request_headers)
+
+        if available_extensions is not None:
+            extensions_header = build_extension(
+                [
+                    (extension_factory.name, extension_factory.get_request_params())
+                    for extension_factory in available_extensions
+                ]
+            )
+            request_headers["Sec-WebSocket-Extensions"] = extensions_header
+
+        if available_subprotocols is not None:
+            protocol_header = build_subprotocol(available_subprotocols)
+            request_headers["Sec-WebSocket-Protocol"] = protocol_header
+
+        if self.extra_headers is not None:
+            request_headers.update(self.extra_headers)
+
+        request_headers.setdefault("User-Agent", USER_AGENT)
+
+        self.write_http_request(wsuri.resource_name, request_headers)
+
+        status_code, response_headers = await self.read_http_response()
+        if status_code in (301, 302, 303, 307, 308):
+            if "Location" not in response_headers:
+                raise InvalidHeader("Location")
+            raise RedirectHandshake(response_headers["Location"])
+        elif status_code != 101:
+            raise InvalidStatusCode(status_code, response_headers)
+
+        check_response(response_headers, key)
+
+        self.extensions = self.process_extensions(
+            response_headers, available_extensions
+        )
+
+        self.subprotocol = self.process_subprotocol(
+            response_headers, available_subprotocols
+        )
+
+        self.connection_open()
+
+
+class Connect:
+    """
+    Connect to the WebSocket server at ``uri``.
+
+    Awaiting :func:`connect` yields a :class:`WebSocketClientProtocol` which
+    can then be used to send and receive messages.
+
+    :func:`connect` can be used as an asynchronous context manager::
+
+        async with websockets.connect(...) as websocket:
+            ...
+
+    The connection is closed automatically when exiting the context.
+
+    :func:`connect` can be used as an infinite asynchronous iterator to
+    reconnect automatically on errors::
+
+        async for websocket in websockets.connect(...):
+            try:
+                ...
+            except websockets.ConnectionClosed:
+                continue
+
+    The connection is closed automatically after each iteration of the loop.
+
+    If an error occurs while establishing the connection, :func:`connect`
+    retries with exponential backoff. The backoff delay starts at three
+    seconds and increases up to one minute.
+
+    If an error occurs in the body of the loop, you can handle the exception
+    and :func:`connect` will reconnect with the next iteration; or you can
+    let the exception bubble up and break out of the loop. This lets you
+    decide which errors trigger a reconnection and which errors are fatal.
+
+    Args:
+        uri: URI of the WebSocket server.
+        create_protocol: factory for the :class:`asyncio.Protocol` managing
+            the connection; defaults to :class:`WebSocketClientProtocol`; may
+            be set to a wrapper or a subclass to customize connection handling.
+        logger: logger for this connection;
+            defaults to ``logging.getLogger("websockets.client")``;
+            see the :doc:`logging guide <../topics/logging>` for details.
+        compression: shortcut that enables the "permessage-deflate" extension
+            by default; may be set to :obj:`None` to disable compression;
+            see the :doc:`compression guide <../topics/compression>` for details.
+        origin: value of the ``Origin`` header. This is useful when connecting
+            to a server that validates the ``Origin`` header to defend against
+            Cross-Site WebSocket Hijacking attacks.
+        extensions: list of supported extensions, in order in which they
+            should be tried.
+        subprotocols: list of supported subprotocols, in order of decreasing
+            preference.
+        extra_headers: arbitrary HTTP headers to add to the request.
+        open_timeout: timeout for opening the connection in seconds;
+            :obj:`None` to disable the timeout.
+
+    See :class:`~websockets.legacy.protocol.WebSocketCommonProtocol` for the
+    documentation of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
+    ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit``.
+
+    Any other keyword arguments are passed to the event loop's
+    :meth:`~asyncio.loop.create_connection` method.
+
+    For example:
+
+    * You can set ``ssl`` to a :class:`~ssl.SSLContext` to enforce TLS
+      settings. When connecting to a ``wss://`` URI, if ``ssl`` isn't
+      provided, a TLS context is created
+      with :func:`~ssl.create_default_context`.
+
+    * You can set ``host`` and ``port`` to connect to a different host and
+      port from those found in ``uri``. This only changes the destination of
+      the TCP connection. The host name from ``uri`` is still used in the TLS
+      handshake for secure connections and in the ``Host`` header.
+
+    Returns:
+        WebSocketClientProtocol: WebSocket connection.
+
+    Raises:
+        InvalidURI: if ``uri`` isn't a valid WebSocket URI.
+        InvalidHandshake: if the opening handshake fails.
+        ~asyncio.TimeoutError: if the opening handshake times out.
+
+    """
+
+    MAX_REDIRECTS_ALLOWED = 10
+
+    def __init__(
+        self,
+        uri: str,
+        *,
+        create_protocol: Optional[Callable[[Any], WebSocketClientProtocol]] = None,
+        logger: Optional[LoggerLike] = None,
+        compression: Optional[str] = "deflate",
+        origin: Optional[Origin] = None,
+        extensions: Optional[Sequence[ClientExtensionFactory]] = None,
+        subprotocols: Optional[Sequence[Subprotocol]] = None,
+        extra_headers: Optional[HeadersLike] = None,
+        open_timeout: Optional[float] = 10,
+        ping_interval: Optional[float] = 20,
+        ping_timeout: Optional[float] = 20,
+        close_timeout: Optional[float] = None,
+        max_size: Optional[int] = 2**20,
+        max_queue: Optional[int] = 2**5,
+        read_limit: int = 2**16,
+        write_limit: int = 2**16,
+        **kwargs: Any,
+    ) -> None:
+        # Backwards compatibility: close_timeout used to be called timeout.
+        timeout: Optional[float] = kwargs.pop("timeout", None)
+        if timeout is None:
+            timeout = 10
+        else:
+            warnings.warn("rename timeout to close_timeout", DeprecationWarning)
+        # If both are specified, timeout is ignored.
+        if close_timeout is None:
+            close_timeout = timeout
+
+        # Backwards compatibility: create_protocol used to be called klass.
+        klass: Optional[Type[WebSocketClientProtocol]] = kwargs.pop("klass", None)
+        if klass is None:
+            klass = WebSocketClientProtocol
+        else:
+            warnings.warn("rename klass to create_protocol", DeprecationWarning)
+        # If both are specified, klass is ignored.
+        if create_protocol is None:
+            create_protocol = klass
+
+        # Backwards compatibility: recv() used to return None on closed connections
+        legacy_recv: bool = kwargs.pop("legacy_recv", False)
+
+        # Backwards compatibility: the loop parameter used to be supported.
+ _loop: Optional[asyncio.AbstractEventLoop] = kwargs.pop("loop", None) + if _loop is None: + loop = asyncio.get_event_loop() + else: + loop = _loop + warnings.warn("remove loop argument", DeprecationWarning) + + wsuri = parse_uri(uri) + if wsuri.secure: + kwargs.setdefault("ssl", True) + elif kwargs.get("ssl") is not None: + raise ValueError( + "connect() received a ssl argument for a ws:// URI, " + "use a wss:// URI to enable TLS" + ) + + if compression == "deflate": + extensions = enable_client_permessage_deflate(extensions) + elif compression is not None: + raise ValueError(f"unsupported compression: {compression}") + + if subprotocols is not None: + validate_subprotocols(subprotocols) + + factory = functools.partial( + create_protocol, + logger=logger, + origin=origin, + extensions=extensions, + subprotocols=subprotocols, + extra_headers=extra_headers, + ping_interval=ping_interval, + ping_timeout=ping_timeout, + close_timeout=close_timeout, + max_size=max_size, + max_queue=max_queue, + read_limit=read_limit, + write_limit=write_limit, + host=wsuri.host, + port=wsuri.port, + secure=wsuri.secure, + legacy_recv=legacy_recv, + loop=_loop, + ) + + if kwargs.pop("unix", False): + path: Optional[str] = kwargs.pop("path", None) + create_connection = functools.partial( + loop.create_unix_connection, factory, path, **kwargs + ) + else: + host: Optional[str] + port: Optional[int] + if kwargs.get("sock") is None: + host, port = wsuri.host, wsuri.port + else: + # If sock is given, host and port shouldn't be specified. + host, port = None, None + # If host and port are given, override values from the URI. + host = kwargs.pop("host", host) + port = kwargs.pop("port", port) + create_connection = functools.partial( + loop.create_connection, factory, host, port, **kwargs + ) + + self.open_timeout = open_timeout + if logger is None: + logger = logging.getLogger("websockets.client") + self.logger = logger + + # This is a coroutine function. + self._create_connection = create_connection + self._uri = uri + self._wsuri = wsuri + + def handle_redirect(self, uri: str) -> None: + # Update the state of this instance to connect to a new URI. + old_uri = self._uri + old_wsuri = self._wsuri + new_uri = urllib.parse.urljoin(old_uri, uri) + new_wsuri = parse_uri(new_uri) + + # Forbid TLS downgrade. + if old_wsuri.secure and not new_wsuri.secure: + raise SecurityError("redirect from WSS to WS") + + same_origin = ( + old_wsuri.host == new_wsuri.host and old_wsuri.port == new_wsuri.port + ) + + # Rewrite the host and port arguments for cross-origin redirects. + # This preserves connection overrides with the host and port + # arguments if the redirect points to the same host and port. + if not same_origin: + # Replace the host and port argument passed to the protocol factory. + factory = self._create_connection.args[0] + factory = functools.partial( + factory.func, + *factory.args, + **dict(factory.keywords, host=new_wsuri.host, port=new_wsuri.port), + ) + # Replace the host and port argument passed to create_connection. + self._create_connection = functools.partial( + self._create_connection.func, + *(factory, new_wsuri.host, new_wsuri.port), + **self._create_connection.keywords, + ) + + # Set the new WebSocket URI. This suffices for same-origin redirects. + self._uri = new_uri + self._wsuri = new_wsuri + + # async for ... 
in connect(...):
+
+    BACKOFF_MIN = 1.92
+    BACKOFF_MAX = 60.0
+    BACKOFF_FACTOR = 1.618
+    BACKOFF_INITIAL = 5
+
+    async def __aiter__(self) -> AsyncIterator[WebSocketClientProtocol]:
+        backoff_delay = self.BACKOFF_MIN
+        while True:
+            try:
+                async with self as protocol:
+                    yield protocol
+            # Remove this branch when dropping support for Python < 3.8
+            # because CancelledError no longer inherits Exception.
+            except asyncio.CancelledError:  # pragma: no cover
+                raise
+            except Exception:
+                # Add a random initial delay between 0 and 5 seconds.
+                # See 7.2.3. Recovering from Abnormal Closure in RFC 6455.
+                if backoff_delay == self.BACKOFF_MIN:
+                    initial_delay = random.random() * self.BACKOFF_INITIAL
+                    self.logger.info(
+                        "! connect failed; reconnecting in %.1f seconds",
+                        initial_delay,
+                        exc_info=True,
+                    )
+                    await asyncio.sleep(initial_delay)
+                else:
+                    self.logger.info(
+                        "! connect failed again; retrying in %d seconds",
+                        int(backoff_delay),
+                        exc_info=True,
+                    )
+                    await asyncio.sleep(int(backoff_delay))
+                # Increase delay with truncated exponential backoff.
+                backoff_delay = backoff_delay * self.BACKOFF_FACTOR
+                backoff_delay = min(backoff_delay, self.BACKOFF_MAX)
+                continue
+            else:
+                # Connection succeeded - reset backoff delay
+                backoff_delay = self.BACKOFF_MIN
+
+    # async with connect(...) as ...:
+
+    async def __aenter__(self) -> WebSocketClientProtocol:
+        return await self
+
+    async def __aexit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_value: Optional[BaseException],
+        traceback: Optional[TracebackType],
+    ) -> None:
+        await self.protocol.close()
+
+    # ... = await connect(...)
+
+    def __await__(self) -> Generator[Any, None, WebSocketClientProtocol]:
+        # Create a suitable iterator by calling __await__ on a coroutine.
+        return self.__await_impl_timeout__().__await__()
+
+    async def __await_impl_timeout__(self) -> WebSocketClientProtocol:
+        return await asyncio.wait_for(self.__await_impl__(), self.open_timeout)
+
+    async def __await_impl__(self) -> WebSocketClientProtocol:
+        for redirects in range(self.MAX_REDIRECTS_ALLOWED):
+            transport, protocol = await self._create_connection()
+            protocol = cast(WebSocketClientProtocol, protocol)
+
+            try:
+                await protocol.handshake(
+                    self._wsuri,
+                    origin=protocol.origin,
+                    available_extensions=protocol.available_extensions,
+                    available_subprotocols=protocol.available_subprotocols,
+                    extra_headers=protocol.extra_headers,
+                )
+            except RedirectHandshake as exc:
+                protocol.fail_connection()
+                await protocol.wait_closed()
+                self.handle_redirect(exc.uri)
+            # Avoid leaking a connected socket when the handshake fails.
+            except (Exception, asyncio.CancelledError):
+                protocol.fail_connection()
+                await protocol.wait_closed()
+                raise
+            else:
+                self.protocol = protocol
+                return protocol
+        else:
+            raise SecurityError("too many redirects")
+
+    # ... = yield from connect(...) - remove when dropping Python < 3.10
+
+    __iter__ = __await__
+
+
+connect = Connect
+
+
+def unix_connect(
+    path: Optional[str] = None,
+    uri: str = "ws://localhost/",
+    **kwargs: Any,
+) -> Connect:
+    """
+    Similar to :func:`connect`, but for connecting to a Unix socket.
+
+    This function builds upon the event loop's
+    :meth:`~asyncio.loop.create_unix_connection` method.
+
+    It is only available on Unix.
+
+    It's mainly useful for debugging servers listening on Unix sockets.
+
+    Args:
+        path: file system path to the Unix socket.
+        uri: URI of the WebSocket server; the host is used in the TLS
+            handshake for secure connections and in the ``Host`` header.
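+
+    Example (illustrative; the socket path is hypothetical)::
+
+        async with unix_connect("/tmp/websockets.sock") as websocket:
+            await websocket.send("Hello!")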
+ + """ + return connect(uri=uri, path=path, unix=True, **kwargs) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/compatibility.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/compatibility.py new file mode 100644 index 00000000..df81de9d --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/compatibility.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +import asyncio +import sys +from typing import Any, Dict + + +def loop_if_py_lt_38(loop: asyncio.AbstractEventLoop) -> Dict[str, Any]: + """ + Helper for the removal of the loop argument in Python 3.10. + + """ + return {"loop": loop} if sys.version_info[:2] < (3, 8) else {} diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/framing.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/framing.py new file mode 100644 index 00000000..c4de7eb2 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/framing.py @@ -0,0 +1,174 @@ +from __future__ import annotations + +import dataclasses +import struct +from typing import Any, Awaitable, Callable, NamedTuple, Optional, Sequence, Tuple + +from .. import extensions, frames +from ..exceptions import PayloadTooBig, ProtocolError + + +try: + from ..speedups import apply_mask +except ImportError: # pragma: no cover + from ..utils import apply_mask + + +class Frame(NamedTuple): + + fin: bool + opcode: frames.Opcode + data: bytes + rsv1: bool = False + rsv2: bool = False + rsv3: bool = False + + @property + def new_frame(self) -> frames.Frame: + return frames.Frame( + self.opcode, + self.data, + self.fin, + self.rsv1, + self.rsv2, + self.rsv3, + ) + + def __str__(self) -> str: + return str(self.new_frame) + + def check(self) -> None: + return self.new_frame.check() + + @classmethod + async def read( + cls, + reader: Callable[[int], Awaitable[bytes]], + *, + mask: bool, + max_size: Optional[int] = None, + extensions: Optional[Sequence[extensions.Extension]] = None, + ) -> Frame: + """ + Read a WebSocket frame. + + Args: + reader: coroutine that reads exactly the requested number of + bytes, unless the end of file is reached. + mask: whether the frame should be masked i.e. whether the read + happens on the server side. + max_size: maximum payload size in bytes. + extensions: list of extensions, applied in reverse order. + + Raises: + PayloadTooBig: if the frame exceeds ``max_size``. + ProtocolError: if the frame contains incorrect values. + + """ + + # Read the header. + data = await reader(2) + head1, head2 = struct.unpack("!BB", data) + + # While not Pythonic, this is marginally faster than calling bool(). 
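+        # Byte 1 holds FIN, RSV1, RSV2, RSV3 and the 4-bit opcode; byte 2
+        # holds the MASK bit and the 7-bit payload length, per
+        # https://www.rfc-editor.org/rfc/rfc6455.html#section-5.2.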
+        fin = True if head1 & 0b10000000 else False
+        rsv1 = True if head1 & 0b01000000 else False
+        rsv2 = True if head1 & 0b00100000 else False
+        rsv3 = True if head1 & 0b00010000 else False
+
+        try:
+            opcode = frames.Opcode(head1 & 0b00001111)
+        except ValueError as exc:
+            raise ProtocolError("invalid opcode") from exc
+
+        if (True if head2 & 0b10000000 else False) != mask:
+            raise ProtocolError("incorrect masking")
+
+        length = head2 & 0b01111111
+        if length == 126:
+            data = await reader(2)
+            (length,) = struct.unpack("!H", data)
+        elif length == 127:
+            data = await reader(8)
+            (length,) = struct.unpack("!Q", data)
+        if max_size is not None and length > max_size:
+            raise PayloadTooBig(f"over size limit ({length} > {max_size} bytes)")
+        if mask:
+            mask_bits = await reader(4)
+
+        # Read the data.
+        data = await reader(length)
+        if mask:
+            data = apply_mask(data, mask_bits)
+
+        new_frame = frames.Frame(opcode, data, fin, rsv1, rsv2, rsv3)
+
+        if extensions is None:
+            extensions = []
+        for extension in reversed(extensions):
+            new_frame = extension.decode(new_frame, max_size=max_size)
+
+        new_frame.check()
+
+        return cls(
+            new_frame.fin,
+            new_frame.opcode,
+            new_frame.data,
+            new_frame.rsv1,
+            new_frame.rsv2,
+            new_frame.rsv3,
+        )
+
+    def write(
+        self,
+        write: Callable[[bytes], Any],
+        *,
+        mask: bool,
+        extensions: Optional[Sequence[extensions.Extension]] = None,
+    ) -> None:
+        """
+        Write a WebSocket frame.
+
+        Args:
+            write: function that writes bytes.
+            mask: whether the frame should be masked i.e. whether the write
+                happens on the client side.
+            extensions: list of extensions, applied in order.
+
+        Raises:
+            ProtocolError: if the frame contains incorrect values.
+
+        """
+        # The frame is written in a single call to write in order to prevent
+        # TCP fragmentation. See #68 for details. This also makes it safe to
+        # send frames concurrently from multiple coroutines.
+        write(self.new_frame.serialize(mask=mask, extensions=extensions))
+
+
+# Backwards compatibility with previously documented public APIs
+
+from ..frames import Close, prepare_ctrl as encode_data, prepare_data  # noqa
+
+
+def parse_close(data: bytes) -> Tuple[int, str]:
+    """
+    Parse the payload from a close frame.
+
+    Returns:
+        Tuple[int, str]: close code and reason.
+
+    Raises:
+        ProtocolError: if data is ill-formed.
+        UnicodeDecodeError: if the reason isn't valid UTF-8.
+
+    """
+    return dataclasses.astuple(Close.parse(data))  # type: ignore
+
+
+def serialize_close(code: int, reason: str) -> bytes:
+    """
+    Serialize the payload for a close frame.
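+
+    Example (illustrative)::
+
+        >>> serialize_close(1000, "")
+        b'\x03\xe8'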
+
+    """
+    return Close(code, reason).serialize()
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/handshake.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/handshake.py
new file mode 100644
index 00000000..569937bb
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/handshake.py
@@ -0,0 +1,165 @@
+from __future__ import annotations
+
+import base64
+import binascii
+from typing import List
+
+from ..datastructures import Headers, MultipleValuesError
+from ..exceptions import InvalidHeader, InvalidHeaderValue, InvalidUpgrade
+from ..headers import parse_connection, parse_upgrade
+from ..typing import ConnectionOption, UpgradeProtocol
+from ..utils import accept_key as accept, generate_key
+
+
+__all__ = ["build_request", "check_request", "build_response", "check_response"]
+
+
+def build_request(headers: Headers) -> str:
+    """
+    Build a handshake request to send to the server.
+
+    Update request headers passed in argument.
+
+    Args:
+        headers: handshake request headers.
+
+    Returns:
+        str: ``key`` that must be passed to :func:`check_response`.
+
+    """
+    key = generate_key()
+    headers["Upgrade"] = "websocket"
+    headers["Connection"] = "Upgrade"
+    headers["Sec-WebSocket-Key"] = key
+    headers["Sec-WebSocket-Version"] = "13"
+    return key
+
+
+def check_request(headers: Headers) -> str:
+    """
+    Check a handshake request received from the client.
+
+    This function doesn't verify that the request is an HTTP/1.1 or higher GET
+    request and doesn't perform ``Host`` and ``Origin`` checks. These controls
+    are usually performed earlier in the HTTP request handling code. They're
+    the responsibility of the caller.
+
+    Args:
+        headers: handshake request headers.
+
+    Returns:
+        str: ``key`` that must be passed to :func:`build_response`.
+
+    Raises:
+        InvalidHandshake: if the handshake request is invalid;
+            then the server must return a 400 Bad Request error.
+
+    """
+    connection: List[ConnectionOption] = sum(
+        [parse_connection(value) for value in headers.get_all("Connection")], []
+    )
+
+    if not any(value.lower() == "upgrade" for value in connection):
+        raise InvalidUpgrade("Connection", ", ".join(connection))
+
+    upgrade: List[UpgradeProtocol] = sum(
+        [parse_upgrade(value) for value in headers.get_all("Upgrade")], []
+    )
+
+    # For compatibility with non-strict implementations, ignore case when
+    # checking the Upgrade header. The RFC always uses "websocket", except
+    # in section 11.2. (IANA registration) where it uses "WebSocket".
+    if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
+        raise InvalidUpgrade("Upgrade", ", ".join(upgrade))
+
+    try:
+        s_w_key = headers["Sec-WebSocket-Key"]
+    except KeyError as exc:
+        raise InvalidHeader("Sec-WebSocket-Key") from exc
+    except MultipleValuesError as exc:
+        raise InvalidHeader(
+            "Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found"
+        ) from exc
+
+    try:
+        raw_key = base64.b64decode(s_w_key.encode(), validate=True)
+    except binascii.Error as exc:
+        raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key) from exc
+    if len(raw_key) != 16:
+        raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key)
+
+    try:
+        s_w_version = headers["Sec-WebSocket-Version"]
+    except KeyError as exc:
+        raise InvalidHeader("Sec-WebSocket-Version") from exc
+    except MultipleValuesError as exc:
+        raise InvalidHeader(
+            "Sec-WebSocket-Version", "more than one Sec-WebSocket-Version header found"
+        ) from exc
+
+    if s_w_version != "13":
+        raise InvalidHeaderValue("Sec-WebSocket-Version", s_w_version)
+
+    return s_w_key
+
+
+def build_response(headers: Headers, key: str) -> None:
+    """
+    Build a handshake response to send to the client.
+
+    Update response headers passed in argument.
+
+    Args:
+        headers: handshake response headers.
+        key: returned by :func:`check_request`.
+
+    """
+    headers["Upgrade"] = "websocket"
+    headers["Connection"] = "Upgrade"
+    headers["Sec-WebSocket-Accept"] = accept(key)
+
+
+def check_response(headers: Headers, key: str) -> None:
+    """
+    Check a handshake response received from the server.
+
+    This function doesn't verify that the response is an HTTP/1.1 or higher
+    response with a 101 status code. These controls are the responsibility of
+    the caller.
+
+    Args:
+        headers: handshake response headers.
+        key: returned by :func:`build_request`.
+
+    Raises:
+        InvalidHandshake: if the handshake response is invalid.
+
+    """
+    connection: List[ConnectionOption] = sum(
+        [parse_connection(value) for value in headers.get_all("Connection")], []
+    )
+
+    if not any(value.lower() == "upgrade" for value in connection):
+        raise InvalidUpgrade("Connection", ", ".join(connection))
+
+    upgrade: List[UpgradeProtocol] = sum(
+        [parse_upgrade(value) for value in headers.get_all("Upgrade")], []
+    )
+
+    # For compatibility with non-strict implementations, ignore case when
+    # checking the Upgrade header. The RFC always uses "websocket", except
+    # in section 11.2. (IANA registration) where it uses "WebSocket".
+ if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"): + raise InvalidUpgrade("Upgrade", ", ".join(upgrade)) + + try: + s_w_accept = headers["Sec-WebSocket-Accept"] + except KeyError as exc: + raise InvalidHeader("Sec-WebSocket-Accept") from exc + except MultipleValuesError as exc: + raise InvalidHeader( + "Sec-WebSocket-Accept", "more than one Sec-WebSocket-Accept header found" + ) from exc + + if s_w_accept != accept(key): + raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/http.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/http.py new file mode 100644 index 00000000..cc2ef1f0 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/http.py @@ -0,0 +1,201 @@ +from __future__ import annotations + +import asyncio +import re +from typing import Tuple + +from ..datastructures import Headers +from ..exceptions import SecurityError + + +__all__ = ["read_request", "read_response"] + +MAX_HEADERS = 256 +MAX_LINE = 4110 + + +def d(value: bytes) -> str: + """ + Decode a bytestring for interpolating into an error message. + + """ + return value.decode(errors="backslashreplace") + + +# See https://www.rfc-editor.org/rfc/rfc7230.html#appendix-B. + +# Regex for validating header names. + +_token_re = re.compile(rb"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+") + +# Regex for validating header values. + +# We don't attempt to support obsolete line folding. + +# Include HTAB (\x09), SP (\x20), VCHAR (\x21-\x7e), obs-text (\x80-\xff). + +# The ABNF is complicated because it attempts to express that optional +# whitespace is ignored. We strip whitespace and don't revalidate that. + +# See also https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 + +_value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*") + + +async def read_request(stream: asyncio.StreamReader) -> Tuple[str, Headers]: + """ + Read an HTTP/1.1 GET request and return ``(path, headers)``. + + ``path`` isn't URL-decoded or validated in any way. + + ``path`` and ``headers`` are expected to contain only ASCII characters. + Other characters are represented with surrogate escapes. + + :func:`read_request` doesn't attempt to read the request body because + WebSocket handshake requests don't have one. If the request contains a + body, it may be read from ``stream`` after this coroutine returns. + + Args: + stream: input to read the request from + + Raises: + EOFError: if the connection is closed without a full HTTP request + SecurityError: if the request exceeds a security limit + ValueError: if the request isn't well formatted + + """ + # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.1.1 + + # Parsing is simple because fixed values are expected for method and + # version and because path isn't checked. Since WebSocket software tends + # to implement HTTP/1.1 strictly, there's little need for lenient parsing. 
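+    # For example, the request line b"GET /chat HTTP/1.1" splits below into
+    # (b"GET", b"/chat", b"HTTP/1.1") (editor's illustration).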
+
+    try:
+        request_line = await read_line(stream)
+    except EOFError as exc:
+        raise EOFError("connection closed while reading HTTP request line") from exc
+
+    try:
+        method, raw_path, version = request_line.split(b" ", 2)
+    except ValueError:  # not enough values to unpack (expected 3, got 1-2)
+        raise ValueError(f"invalid HTTP request line: {d(request_line)}") from None
+
+    if method != b"GET":
+        raise ValueError(f"unsupported HTTP method: {d(method)}")
+    if version != b"HTTP/1.1":
+        raise ValueError(f"unsupported HTTP version: {d(version)}")
+    path = raw_path.decode("ascii", "surrogateescape")
+
+    headers = await read_headers(stream)
+
+    return path, headers
+
+
+async def read_response(stream: asyncio.StreamReader) -> Tuple[int, str, Headers]:
+    """
+    Read an HTTP/1.1 response and return ``(status_code, reason, headers)``.
+
+    ``reason`` and ``headers`` are expected to contain only ASCII characters.
+    Other characters are represented with surrogate escapes.
+
+    :func:`read_response` doesn't attempt to read the response body because
+    WebSocket handshake responses don't have one. If the response contains a
+    body, it may be read from ``stream`` after this coroutine returns.
+
+    Args:
+        stream: input to read the response from
+
+    Raises:
+        EOFError: if the connection is closed without a full HTTP response
+        SecurityError: if the response exceeds a security limit
+        ValueError: if the response isn't well formatted
+
+    """
+    # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.1.2
+
+    # As in read_request, parsing is simple because a fixed value is expected
+    # for version, status_code is a 3-digit number, and reason can be ignored.
+
+    try:
+        status_line = await read_line(stream)
+    except EOFError as exc:
+        raise EOFError("connection closed while reading HTTP status line") from exc
+
+    try:
+        version, raw_status_code, raw_reason = status_line.split(b" ", 2)
+    except ValueError:  # not enough values to unpack (expected 3, got 1-2)
+        raise ValueError(f"invalid HTTP status line: {d(status_line)}") from None
+
+    if version != b"HTTP/1.1":
+        raise ValueError(f"unsupported HTTP version: {d(version)}")
+    try:
+        status_code = int(raw_status_code)
+    except ValueError:  # invalid literal for int() with base 10
+        raise ValueError(f"invalid HTTP status code: {d(raw_status_code)}") from None
+    if not 100 <= status_code < 1000:
+        raise ValueError(f"unsupported HTTP status code: {d(raw_status_code)}")
+    if not _value_re.fullmatch(raw_reason):
+        raise ValueError(f"invalid HTTP reason phrase: {d(raw_reason)}")
+    reason = raw_reason.decode()
+
+    headers = await read_headers(stream)
+
+    return status_code, reason, headers
+
+
+async def read_headers(stream: asyncio.StreamReader) -> Headers:
+    """
+    Read HTTP headers from ``stream``.
+
+    Non-ASCII characters are represented with surrogate escapes.
+
+    """
+    # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.2
+
+    # We don't attempt to support obsolete line folding.
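+    # For example, the header line b"Sec-WebSocket-Version: 13" yields the
+    # name "Sec-WebSocket-Version" and the value "13" (editor's illustration).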
+ + headers = Headers() + for _ in range(MAX_HEADERS + 1): + try: + line = await read_line(stream) + except EOFError as exc: + raise EOFError("connection closed while reading HTTP headers") from exc + if line == b"": + break + + try: + raw_name, raw_value = line.split(b":", 1) + except ValueError: # not enough values to unpack (expected 2, got 1) + raise ValueError(f"invalid HTTP header line: {d(line)}") from None + if not _token_re.fullmatch(raw_name): + raise ValueError(f"invalid HTTP header name: {d(raw_name)}") + raw_value = raw_value.strip(b" \t") + if not _value_re.fullmatch(raw_value): + raise ValueError(f"invalid HTTP header value: {d(raw_value)}") + + name = raw_name.decode("ascii") # guaranteed to be ASCII at this point + value = raw_value.decode("ascii", "surrogateescape") + headers[name] = value + + else: + raise SecurityError("too many HTTP headers") + + return headers + + +async def read_line(stream: asyncio.StreamReader) -> bytes: + """ + Read a single line from ``stream``. + + CRLF is stripped from the return value. + + """ + # Security: this is bounded by the StreamReader's limit (default = 32 KiB). + line = await stream.readline() + # Security: this guarantees header values are small (hard-coded = 4 KiB) + if len(line) > MAX_LINE: + raise SecurityError("line too long") + # Not mandatory but safe - https://www.rfc-editor.org/rfc/rfc7230.html#section-3.5 + if not line.endswith(b"\r\n"): + raise EOFError("line without CRLF") + return line[:-2] diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/protocol.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/protocol.py new file mode 100644 index 00000000..d1d52bfa --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/protocol.py @@ -0,0 +1,1605 @@ +from __future__ import annotations + +import asyncio +import codecs +import collections +import logging +import random +import ssl +import struct +import uuid +import warnings +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Awaitable, + Deque, + Dict, + Iterable, + List, + Mapping, + Optional, + Union, + cast, +) + +from ..connection import State +from ..datastructures import Headers +from ..exceptions import ( + ConnectionClosed, + ConnectionClosedError, + ConnectionClosedOK, + InvalidState, + PayloadTooBig, + ProtocolError, +) +from ..extensions import Extension +from ..frames import ( + OK_CLOSE_CODES, + OP_BINARY, + OP_CLOSE, + OP_CONT, + OP_PING, + OP_PONG, + OP_TEXT, + Close, + Opcode, + prepare_ctrl, + prepare_data, +) +from ..typing import Data, LoggerLike, Subprotocol +from .compatibility import loop_if_py_lt_38 +from .framing import Frame + + +__all__ = ["WebSocketCommonProtocol", "broadcast"] + + +# In order to ensure consistency, the code always checks the current value of +# WebSocketCommonProtocol.state before assigning a new value and never yields +# between the check and the assignment. + + +class WebSocketCommonProtocol(asyncio.Protocol): + """ + WebSocket connection. + + :class:`WebSocketCommonProtocol` provides APIs shared between WebSocket + servers and clients. You shouldn't use it directly. Instead, use + :class:`~websockets.client.WebSocketClientProtocol` or + :class:`~websockets.server.WebSocketServerProtocol`. 
+
+    This documentation focuses on low-level details that aren't covered in the
+    documentation of :class:`~websockets.client.WebSocketClientProtocol` and
+    :class:`~websockets.server.WebSocketServerProtocol` for the sake of
+    simplicity.
+
+    Once the connection is open, a Ping_ frame is sent every ``ping_interval``
+    seconds. This serves as a keepalive. It helps keep the connection open,
+    especially in the presence of proxies with short timeouts on inactive
+    connections. Set ``ping_interval`` to :obj:`None` to disable this behavior.
+
+    .. _Ping: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.2
+
+    If the corresponding Pong_ frame isn't received within ``ping_timeout``
+    seconds, the connection is considered unusable and is closed with code
+    1011. This ensures that the remote endpoint remains responsive. Set
+    ``ping_timeout`` to :obj:`None` to disable this behavior.
+
+    .. _Pong: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.3
+
+    The ``close_timeout`` parameter defines a maximum wait time for completing
+    the closing handshake and terminating the TCP connection. For legacy
+    reasons, :meth:`close` completes in at most ``5 * close_timeout`` seconds
+    for clients and ``4 * close_timeout`` for servers.
+
+    See the discussion of :doc:`timeouts <../topics/timeouts>` for details.
+
+    ``close_timeout`` needs to be a parameter of the protocol because
+    websockets usually calls :meth:`close` implicitly upon exit:
+
+    * on the client side, when :func:`~websockets.client.connect` is used as a
+      context manager;
+    * on the server side, when the connection handler terminates.
+
+    To apply a timeout to any other API, wrap it in :func:`~asyncio.wait_for`.
+
+    The ``max_size`` parameter enforces the maximum size for incoming messages
+    in bytes. The default value is 1 MiB. If a larger message is received,
+    :meth:`recv` will raise :exc:`~websockets.exceptions.ConnectionClosedError`
+    and the connection will be closed with code 1009.
+
+    The ``max_queue`` parameter sets the maximum length of the queue that
+    holds incoming messages. The default value is ``32``. Messages are added
+    to an in-memory queue when they're received; then :meth:`recv` pops from
+    that queue. In order to prevent excessive memory consumption when
+    messages are received faster than they can be processed, the queue must
+    be bounded. If the queue fills up, the protocol stops processing incoming
+    data until :meth:`recv` is called. In this situation, various receive
+    buffers (at least in :mod:`asyncio` and in the OS) will fill up, then the
+    TCP receive window will shrink, slowing down transmission to avoid packet
+    loss.
+
+    Since Python can use up to 4 bytes of memory to represent a single
+    character, each connection may use up to ``4 * max_size * max_queue``
+    bytes of memory to store incoming messages. By default, this is 128 MiB.
+    You may want to lower the limits, depending on your application's
+    requirements.
+
+    The ``read_limit`` argument sets the high-water limit of the buffer for
+    incoming bytes. The low-water limit is half the high-water limit. The
+    default value is 64 KiB, half of asyncio's default (based on the current
+    implementation of :class:`~asyncio.StreamReader`).
+
+    The ``write_limit`` argument sets the high-water limit of the buffer for
+    outgoing bytes. The low-water limit is a quarter of the high-water limit.
+    The default value is 64 KiB, equal to asyncio's default (based on the
+    current implementation of ``FlowControlMixin``).
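+
+    As a sanity check on the figures above (editor's arithmetic): with the
+    defaults ``max_size = 2 ** 20`` (1 MiB) and ``max_queue = 32``,
+    ``4 * max_size * max_queue`` is ``4 * 2 ** 20 * 32`` bytes, i.e. the
+    128 MiB mentioned earlier.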
+ + See the discussion of :doc:`memory usage <../topics/memory>` for details. + + Args: + logger: logger for this connection; + defaults to ``logging.getLogger("websockets.protocol")``; + see the :doc:`logging guide <../topics/logging>` for details. + ping_interval: delay between keepalive pings in seconds; + :obj:`None` to disable keepalive pings. + ping_timeout: timeout for keepalive pings in seconds; + :obj:`None` to disable timeouts. + close_timeout: timeout for closing the connection in seconds; + for legacy reasons, the actual timeout is 4 or 5 times larger. + max_size: maximum size of incoming messages in bytes; + :obj:`None` to disable the limit. + max_queue: maximum number of incoming messages in receive buffer; + :obj:`None` to disable the limit. + read_limit: high-water mark of read buffer in bytes. + write_limit: high-water mark of write buffer in bytes. + + """ + + # There are only two differences between the client-side and server-side + # behavior: masking the payload and closing the underlying TCP connection. + # Set is_client = True/False and side = "client"/"server" to pick a side. + is_client: bool + side: str = "undefined" + + def __init__( + self, + *, + logger: Optional[LoggerLike] = None, + ping_interval: Optional[float] = 20, + ping_timeout: Optional[float] = 20, + close_timeout: Optional[float] = None, + max_size: Optional[int] = 2**20, + max_queue: Optional[int] = 2**5, + read_limit: int = 2**16, + write_limit: int = 2**16, + # The following arguments are kept only for backwards compatibility. + host: Optional[str] = None, + port: Optional[int] = None, + secure: Optional[bool] = None, + legacy_recv: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + timeout: Optional[float] = None, + ) -> None: + if legacy_recv: # pragma: no cover + warnings.warn("legacy_recv is deprecated", DeprecationWarning) + + # Backwards compatibility: close_timeout used to be called timeout. + if timeout is None: + timeout = 10 + else: + warnings.warn("rename timeout to close_timeout", DeprecationWarning) + # If both are specified, timeout is ignored. + if close_timeout is None: + close_timeout = timeout + + # Backwards compatibility: the loop parameter used to be supported. + if loop is None: + loop = asyncio.get_event_loop() + else: + warnings.warn("remove loop argument", DeprecationWarning) + + self.ping_interval = ping_interval + self.ping_timeout = ping_timeout + self.close_timeout = close_timeout + self.max_size = max_size + self.max_queue = max_queue + self.read_limit = read_limit + self.write_limit = write_limit + + # Unique identifier. For logs. + self.id: uuid.UUID = uuid.uuid4() + """Unique identifier of the connection. Useful in logs.""" + + # Logger or LoggerAdapter for this connection. + if logger is None: + logger = logging.getLogger("websockets.protocol") + # https://github.com/python/typeshed/issues/5561 + logger = cast(logging.Logger, logger) + self.logger: LoggerLike = logging.LoggerAdapter(logger, {"websocket": self}) + """Logger for this connection.""" + + # Track if DEBUG is enabled. Shortcut logging calls if it isn't. + self.debug = logger.isEnabledFor(logging.DEBUG) + + self.loop = loop + + self._host = host + self._port = port + self._secure = secure + self.legacy_recv = legacy_recv + + # Configure read buffer limits. The high-water limit is defined by + # ``self.read_limit``. The ``limit`` argument controls the line length + # limit and half the buffer limit of :class:`~asyncio.StreamReader`. 
+ # That's why it must be set to half of ``self.read_limit``. + self.reader = asyncio.StreamReader(limit=read_limit // 2, loop=loop) + + # Copied from asyncio.FlowControlMixin + self._paused = False + self._drain_waiter: Optional[asyncio.Future[None]] = None + + self._drain_lock = asyncio.Lock(**loop_if_py_lt_38(loop)) + + # This class implements the data transfer and closing handshake, which + # are shared between the client-side and the server-side. + # Subclasses implement the opening handshake and, on success, execute + # :meth:`connection_open` to change the state to OPEN. + self.state = State.CONNECTING + if self.debug: + self.logger.debug("= connection is CONNECTING") + + # HTTP protocol parameters. + self.path: str + """Path of the opening handshake request.""" + self.request_headers: Headers + """Opening handshake request headers.""" + self.response_headers: Headers + """Opening handshake response headers.""" + + # WebSocket protocol parameters. + self.extensions: List[Extension] = [] + self.subprotocol: Optional[Subprotocol] = None + """Subprotocol, if one was negotiated.""" + + # Close code and reason, set when a close frame is sent or received. + self.close_rcvd: Optional[Close] = None + self.close_sent: Optional[Close] = None + self.close_rcvd_then_sent: Optional[bool] = None + + # Completed when the connection state becomes CLOSED. Translates the + # :meth:`connection_lost` callback to a :class:`~asyncio.Future` + # that can be awaited. (Other :class:`~asyncio.Protocol` callbacks are + # translated by ``self.stream_reader``). + self.connection_lost_waiter: asyncio.Future[None] = loop.create_future() + + # Queue of received messages. + self.messages: Deque[Data] = collections.deque() + self._pop_message_waiter: Optional[asyncio.Future[None]] = None + self._put_message_waiter: Optional[asyncio.Future[None]] = None + + # Protect sending fragmented messages. + self._fragmented_message_waiter: Optional[asyncio.Future[None]] = None + + # Mapping of ping IDs to pong waiters, in chronological order. + self.pings: Dict[bytes, asyncio.Future[None]] = {} + + # Task running the data transfer. + self.transfer_data_task: asyncio.Task[None] + + # Exception that occurred during data transfer, if any. + self.transfer_data_exc: Optional[BaseException] = None + + # Task sending keepalive pings. + self.keepalive_ping_task: asyncio.Task[None] + + # Task closing the TCP connection. + self.close_connection_task: asyncio.Task[None] + + # Copied from asyncio.FlowControlMixin + async def _drain_helper(self) -> None: # pragma: no cover + if self.connection_lost_waiter.done(): + raise ConnectionResetError("Connection lost") + if not self._paused: + return + waiter = self._drain_waiter + assert waiter is None or waiter.cancelled() + waiter = self.loop.create_future() + self._drain_waiter = waiter + await waiter + + # Copied from asyncio.StreamWriter + async def _drain(self) -> None: # pragma: no cover + if self.reader is not None: + exc = self.reader.exception() + if exc is not None: + raise exc + if self.transport is not None: + if self.transport.is_closing(): + # Yield to the event loop so connection_lost() may be + # called. Without this, _drain_helper() would return + # immediately, and code that calls + # write(...); yield from drain() + # in a loop would never call connection_lost(), so it + # would not see an error when the socket is closed. 
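+                # (Editor's note: the zero-delay sleep below yields exactly
+                # one event loop iteration, giving connection_lost() a chance
+                # to run before _drain_helper() is called.)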
+ await asyncio.sleep(0, **loop_if_py_lt_38(self.loop)) + await self._drain_helper() + + def connection_open(self) -> None: + """ + Callback when the WebSocket opening handshake completes. + + Enter the OPEN state and start the data transfer phase. + + """ + # 4.1. The WebSocket Connection is Established. + assert self.state is State.CONNECTING + self.state = State.OPEN + if self.debug: + self.logger.debug("= connection is OPEN") + # Start the task that receives incoming WebSocket messages. + self.transfer_data_task = self.loop.create_task(self.transfer_data()) + # Start the task that sends pings at regular intervals. + self.keepalive_ping_task = self.loop.create_task(self.keepalive_ping()) + # Start the task that eventually closes the TCP connection. + self.close_connection_task = self.loop.create_task(self.close_connection()) + + @property + def host(self) -> Optional[str]: + alternative = "remote_address" if self.is_client else "local_address" + warnings.warn(f"use {alternative}[0] instead of host", DeprecationWarning) + return self._host + + @property + def port(self) -> Optional[int]: + alternative = "remote_address" if self.is_client else "local_address" + warnings.warn(f"use {alternative}[1] instead of port", DeprecationWarning) + return self._port + + @property + def secure(self) -> Optional[bool]: + warnings.warn("don't use secure", DeprecationWarning) + return self._secure + + # Public API + + @property + def local_address(self) -> Any: + """ + Local address of the connection. + + For IPv4 connections, this is a ``(host, port)`` tuple. + + The format of the address depends on the address family; + see :meth:`~socket.socket.getsockname`. + + :obj:`None` if the TCP connection isn't established yet. + + """ + try: + transport = self.transport + except AttributeError: + return None + else: + return transport.get_extra_info("sockname") + + @property + def remote_address(self) -> Any: + """ + Remote address of the connection. + + For IPv4 connections, this is a ``(host, port)`` tuple. + + The format of the address depends on the address family; + see :meth:`~socket.socket.getpeername`. + + :obj:`None` if the TCP connection isn't established yet. + + """ + try: + transport = self.transport + except AttributeError: + return None + else: + return transport.get_extra_info("peername") + + @property + def open(self) -> bool: + """ + :obj:`True` when the connection is open; :obj:`False` otherwise. + + This attribute may be used to detect disconnections. However, this + approach is discouraged per the EAFP_ principle. Instead, you should + handle :exc:`~websockets.exceptions.ConnectionClosed` exceptions. + + .. _EAFP: https://docs.python.org/3/glossary.html#term-eafp + + """ + return self.state is State.OPEN and not self.transfer_data_task.done() + + @property + def closed(self) -> bool: + """ + :obj:`True` when the connection is closed; :obj:`False` otherwise. + + Be aware that both :attr:`open` and :attr:`closed` are :obj:`False` + during the opening and closing sequences. + + """ + return self.state is State.CLOSED + + @property + def close_code(self) -> Optional[int]: + """ + WebSocket close code, defined in `section 7.1.5 of RFC 6455`_. + + .. _section 7.1.5 of RFC 6455: + https://www.rfc-editor.org/rfc/rfc6455.html#section-7.1.5 + + :obj:`None` if the connection isn't closed yet. 
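+
+        (Editor's note: as the implementation below shows, 1006 is reported
+        when the connection was closed without receiving a close frame.)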
+ + """ + if self.state is not State.CLOSED: + return None + elif self.close_rcvd is None: + return 1006 + else: + return self.close_rcvd.code + + @property + def close_reason(self) -> Optional[str]: + """ + WebSocket close reason, defined in `section 7.1.6 of RFC 6455`_. + + .. _section 7.1.6 of RFC 6455: + https://www.rfc-editor.org/rfc/rfc6455.html#section-7.1.6 + + :obj:`None` if the connection isn't closed yet. + + """ + if self.state is not State.CLOSED: + return None + elif self.close_rcvd is None: + return "" + else: + return self.close_rcvd.reason + + async def __aiter__(self) -> AsyncIterator[Data]: + """ + Iterate on incoming messages. + + The iterator exits normally when the connection is closed with the + close code 1000 (OK) or 1001(going away). It raises + a :exc:`~websockets.exceptions.ConnectionClosedError` exception when + the connection is closed with any other code. + + """ + try: + while True: + yield await self.recv() + except ConnectionClosedOK: + return + + async def recv(self) -> Data: + """ + Receive the next message. + + When the connection is closed, :meth:`recv` raises + :exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it + raises :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal + connection closure and + :exc:`~websockets.exceptions.ConnectionClosedError` after a protocol + error or a network failure. This is how you detect the end of the + message stream. + + Canceling :meth:`recv` is safe. There's no risk of losing the next + message. The next invocation of :meth:`recv` will return it. + + This makes it possible to enforce a timeout by wrapping :meth:`recv` + in :func:`~asyncio.wait_for`. + + Returns: + Data: A string (:class:`str`) for a Text_ frame. A bytestring + (:class:`bytes`) for a Binary_ frame. + + .. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6 + .. _Binary: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6 + + Raises: + ConnectionClosed: when the connection is closed. + RuntimeError: if two coroutines call :meth:`recv` concurrently. + + """ + if self._pop_message_waiter is not None: + raise RuntimeError( + "cannot call recv while another coroutine " + "is already waiting for the next message" + ) + + # Don't await self.ensure_open() here: + # - messages could be available in the queue even if the connection + # is closed; + # - messages could be received before the closing frame even if the + # connection is closing. + + # Wait until there's a message in the queue (if necessary) or the + # connection is closed. + while len(self.messages) <= 0: + pop_message_waiter: asyncio.Future[None] = self.loop.create_future() + self._pop_message_waiter = pop_message_waiter + try: + # If asyncio.wait() is canceled, it doesn't cancel + # pop_message_waiter and self.transfer_data_task. + await asyncio.wait( + [pop_message_waiter, self.transfer_data_task], + return_when=asyncio.FIRST_COMPLETED, + **loop_if_py_lt_38(self.loop), + ) + finally: + self._pop_message_waiter = None + + # If asyncio.wait(...) exited because self.transfer_data_task + # completed before receiving a new message, raise a suitable + # exception (or return None if legacy_recv is enabled). + if not pop_message_waiter.done(): + if self.legacy_recv: + return None # type: ignore + else: + # Wait until the connection is closed to raise + # ConnectionClosed with the correct code and reason. + await self.ensure_open() + + # Pop a message from the queue. + message = self.messages.popleft() + + # Notify transfer_data(). 
+        if self._put_message_waiter is not None:
+            self._put_message_waiter.set_result(None)
+            self._put_message_waiter = None
+
+        return message
+
+    async def send(
+        self,
+        message: Union[Data, Iterable[Data], AsyncIterable[Data]],
+    ) -> None:
+        """
+        Send a message.
+
+        A string (:class:`str`) is sent as a Text_ frame. A bytestring or
+        bytes-like object (:class:`bytes`, :class:`bytearray`, or
+        :class:`memoryview`) is sent as a Binary_ frame.
+
+        .. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
+        .. _Binary: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
+
+        :meth:`send` also accepts an iterable or an asynchronous iterable of
+        strings, bytestrings, or bytes-like objects to enable fragmentation_.
+        Each item is treated as a message fragment and sent in its own frame.
+        All items must be of the same type, or else :meth:`send` will raise a
+        :exc:`TypeError` and the connection will be closed.
+
+        .. _fragmentation: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.4
+
+        :meth:`send` rejects dict-like objects because this is often an error.
+        (If you want to send the keys of a dict-like object as fragments, call
+        its :meth:`~dict.keys` method and pass the result to :meth:`send`.)
+
+        Canceling :meth:`send` is discouraged. Instead, you should close the
+        connection with :meth:`close`. Indeed, there are only two situations
+        where :meth:`send` may yield control to the event loop and then get
+        canceled; in both cases, :meth:`close` has the same effect and is
+        clearer:
+
+        1. The write buffer is full. If you don't want to wait until enough
+           data is sent, your only alternative is to close the connection.
+           :meth:`close` will likely time out then abort the TCP connection.
+        2. ``message`` is an asynchronous iterator that yields control.
+           Stopping in the middle of a fragmented message will cause a
+           protocol error and the connection will be closed.
+
+        When the connection is closed, :meth:`send` raises
+        :exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it
+        raises :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal
+        connection closure and
+        :exc:`~websockets.exceptions.ConnectionClosedError` after a protocol
+        error or a network failure.
+
+        Args:
+            message (Union[Data, Iterable[Data], AsyncIterable[Data]]): message
+                to send.
+
+        Raises:
+            ConnectionClosed: when the connection is closed.
+            TypeError: if ``message`` doesn't have a supported type.
+
+        """
+        await self.ensure_open()
+
+        # While sending a fragmented message, prevent sending other messages
+        # until all fragments are sent.
+        while self._fragmented_message_waiter is not None:
+            await asyncio.shield(self._fragmented_message_waiter)
+
+        # Unfragmented message -- this case must be handled first because
+        # strings and bytes-like objects are iterable.
+
+        if isinstance(message, (str, bytes, bytearray, memoryview)):
+            opcode, data = prepare_data(message)
+            await self.write_frame(True, opcode, data)
+
+        # Catch a common mistake -- passing a dict to send().
+
+        elif isinstance(message, Mapping):
+            raise TypeError("data is a dict-like object")
+
+        # Fragmented message -- regular iterator.
+
+        elif isinstance(message, Iterable):
+
+            # Work around https://github.com/python/mypy/issues/6227
+            message = cast(Iterable[Data], message)
+
+            iter_message = iter(message)
+            try:
+                message_chunk = next(iter_message)
+            except StopIteration:
+                return
+            opcode, data = prepare_data(message_chunk)
+
+            self._fragmented_message_waiter = asyncio.Future()
+            try:
+                # First fragment.
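+                # (Editor's note: FIN is False here; the remaining fragments
+                # below are sent as OP_CONT, and the final empty frame sets
+                # FIN to end the message.)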
+ await self.write_frame(False, opcode, data) + + # Other fragments. + for message_chunk in iter_message: + confirm_opcode, data = prepare_data(message_chunk) + if confirm_opcode != opcode: + raise TypeError("data contains inconsistent types") + await self.write_frame(False, OP_CONT, data) + + # Final fragment. + await self.write_frame(True, OP_CONT, b"") + + except (Exception, asyncio.CancelledError): + # We're half-way through a fragmented message and we can't + # complete it. This makes the connection unusable. + self.fail_connection(1011) + raise + + finally: + self._fragmented_message_waiter.set_result(None) + self._fragmented_message_waiter = None + + # Fragmented message -- asynchronous iterator + + elif isinstance(message, AsyncIterable): + # aiter_message = aiter(message) without aiter + # https://github.com/python/mypy/issues/5738 + aiter_message = type(message).__aiter__(message) # type: ignore + try: + # message_chunk = anext(aiter_message) without anext + # https://github.com/python/mypy/issues/5738 + message_chunk = await type(aiter_message).__anext__( # type: ignore + aiter_message + ) + except StopAsyncIteration: + return + opcode, data = prepare_data(message_chunk) + + self._fragmented_message_waiter = asyncio.Future() + try: + # First fragment. + await self.write_frame(False, opcode, data) + + # Other fragments. + # https://github.com/python/mypy/issues/5738 + # coverage reports this code as not covered, but it is + # exercised by tests - changing it breaks the tests! + async for message_chunk in aiter_message: # type: ignore # pragma: no cover # noqa + confirm_opcode, data = prepare_data(message_chunk) + if confirm_opcode != opcode: + raise TypeError("data contains inconsistent types") + await self.write_frame(False, OP_CONT, data) + + # Final fragment. + await self.write_frame(True, OP_CONT, b"") + + except (Exception, asyncio.CancelledError): + # We're half-way through a fragmented message and we can't + # complete it. This makes the connection unusable. + self.fail_connection(1011) + raise + + finally: + self._fragmented_message_waiter.set_result(None) + self._fragmented_message_waiter = None + + else: + raise TypeError("data must be str, bytes-like, or iterable") + + async def close(self, code: int = 1000, reason: str = "") -> None: + """ + Perform the closing handshake. + + :meth:`close` waits for the other end to complete the handshake and + for the TCP connection to terminate. As a consequence, there's no need + to await :meth:`wait_closed` after :meth:`close`. + + :meth:`close` is idempotent: it doesn't do anything once the + connection is closed. + + Wrapping :func:`close` in :func:`~asyncio.create_task` is safe, given + that errors during connection termination aren't particularly useful. + + Canceling :meth:`close` is discouraged. If it takes too long, you can + set a shorter ``close_timeout``. If you don't want to wait, let the + Python process exit, then the OS will take care of closing the TCP + connection. + + Args: + code: WebSocket close code. + reason: WebSocket close reason. + + """ + try: + await asyncio.wait_for( + self.write_close_frame(Close(code, reason)), + self.close_timeout, + **loop_if_py_lt_38(self.loop), + ) + except asyncio.TimeoutError: + # If the close frame cannot be sent because the send buffers + # are full, the closing handshake won't complete anyway. + # Fail the connection to shut down faster. 
+            self.fail_connection()
+
+        # If no close frame is received within the timeout, wait_for() cancels
+        # the data transfer task and raises TimeoutError.
+
+        # If close() is called multiple times concurrently and one of these
+        # calls hits the timeout, the data transfer task will be canceled.
+        # Other calls will receive a CancelledError here.
+
+        try:
+            # If close() is canceled during the wait, self.transfer_data_task
+            # is canceled before the timeout elapses.
+            await asyncio.wait_for(
+                self.transfer_data_task,
+                self.close_timeout,
+                **loop_if_py_lt_38(self.loop),
+            )
+        except (asyncio.TimeoutError, asyncio.CancelledError):
+            pass
+
+        # Wait for the close connection task to close the TCP connection.
+        await asyncio.shield(self.close_connection_task)
+
+    async def wait_closed(self) -> None:
+        """
+        Wait until the connection is closed.
+
+        This coroutine is identical to the :attr:`closed` attribute, except it
+        can be awaited.
+
+        This can make it easier to detect connection termination, regardless
+        of its cause, in tasks that interact with the WebSocket connection.
+
+        """
+        await asyncio.shield(self.connection_lost_waiter)
+
+    async def ping(self, data: Optional[Data] = None) -> Awaitable[None]:
+        """
+        Send a Ping_.
+
+        .. _Ping: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.2
+
+        A ping may serve as a keepalive or as a check that the remote endpoint
+        received all messages up to this point.
+
+        Canceling :meth:`ping` is discouraged. If :meth:`ping` doesn't return
+        immediately, it means the write buffer is full. If you don't want to
+        wait, you should close the connection.
+
+        Canceling the :class:`~asyncio.Future` returned by :meth:`ping` has no
+        effect.
+
+        Args:
+            data (Optional[Data]): payload of the ping; a string will be
+                encoded to UTF-8; or :obj:`None` to generate a payload
+                containing four random bytes.
+
+        Returns:
+            ~asyncio.Future: A future that will be completed when the
+            corresponding pong is received. You can ignore it if you
+            don't intend to wait.
+
+            ::
+
+                pong_waiter = await ws.ping()
+                await pong_waiter  # only if you want to wait for the pong
+
+        Raises:
+            ConnectionClosed: when the connection is closed.
+            RuntimeError: if another ping was sent with the same data and
+                the corresponding pong wasn't received yet.
+
+        """
+        await self.ensure_open()
+
+        if data is not None:
+            data = prepare_ctrl(data)
+
+            # Protect against duplicates if a payload is explicitly set.
+            if data in self.pings:
+                raise RuntimeError("already waiting for a pong with the same data")
+
+        # Generate a unique random payload otherwise.
+        while data is None or data in self.pings:
+            data = struct.pack("!I", random.getrandbits(32))
+
+        self.pings[data] = self.loop.create_future()
+
+        await self.write_frame(True, OP_PING, data)
+
+        return asyncio.shield(self.pings[data])
+
+    async def pong(self, data: Data = b"") -> None:
+        """
+        Send a Pong_.
+
+        .. _Pong: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.3
+
+        An unsolicited pong may serve as a unidirectional heartbeat.
+
+        Canceling :meth:`pong` is discouraged. If :meth:`pong` doesn't return
+        immediately, it means the write buffer is full. If you don't want to
+        wait, you should close the connection.
+
+        Args:
+            data (Data): payload of the pong; a string will be encoded to
+                UTF-8.
+
+        Raises:
+            ConnectionClosed: when the connection is closed.
+
+        """
+        await self.ensure_open()
+
+        data = prepare_ctrl(data)
+
+        await self.write_frame(True, OP_PONG, data)
+
+    # Private methods - no guarantees.
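+    # A minimal usage sketch of the public API above (an editor's
+    # illustration, not upstream code; it assumes a WebSocket server is
+    # reachable at ws://localhost:8765):
+    #
+    #     import asyncio
+    #     import websockets
+    #
+    #     async def main() -> None:
+    #         async with websockets.connect("ws://localhost:8765") as ws:
+    #             await ws.send("hello")         # Text frame
+    #             reply = await ws.recv()        # next incoming message
+    #             pong_waiter = await ws.ping()  # returns a future
+    #             await pong_waiter              # wait for the pong
+    #
+    #     asyncio.run(main())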
+ + def connection_closed_exc(self) -> ConnectionClosed: + exc: ConnectionClosed + if ( + self.close_rcvd is not None + and self.close_rcvd.code in OK_CLOSE_CODES + and self.close_sent is not None + and self.close_sent.code in OK_CLOSE_CODES + ): + exc = ConnectionClosedOK( + self.close_rcvd, + self.close_sent, + self.close_rcvd_then_sent, + ) + else: + exc = ConnectionClosedError( + self.close_rcvd, + self.close_sent, + self.close_rcvd_then_sent, + ) + # Chain to the exception that terminated data transfer, if any. + exc.__cause__ = self.transfer_data_exc + return exc + + async def ensure_open(self) -> None: + """ + Check that the WebSocket connection is open. + + Raise :exc:`~websockets.exceptions.ConnectionClosed` if it isn't. + + """ + # Handle cases from most common to least common for performance. + if self.state is State.OPEN: + # If self.transfer_data_task exited without a closing handshake, + # self.close_connection_task may be closing the connection, going + # straight from OPEN to CLOSED. + if self.transfer_data_task.done(): + await asyncio.shield(self.close_connection_task) + raise self.connection_closed_exc() + else: + return + + if self.state is State.CLOSED: + raise self.connection_closed_exc() + + if self.state is State.CLOSING: + # If we started the closing handshake, wait for its completion to + # get the proper close code and reason. self.close_connection_task + # will complete within 4 or 5 * close_timeout after close(). The + # CLOSING state also occurs when failing the connection. In that + # case self.close_connection_task will complete even faster. + await asyncio.shield(self.close_connection_task) + raise self.connection_closed_exc() + + # Control may only reach this point in buggy third-party subclasses. + assert self.state is State.CONNECTING + raise InvalidState("WebSocket connection isn't established yet") + + async def transfer_data(self) -> None: + """ + Read incoming messages and put them in a queue. + + This coroutine runs in a task until the closing handshake is started. + + """ + try: + while True: + message = await self.read_message() + + # Exit the loop when receiving a close frame. + if message is None: + break + + # Wait until there's room in the queue (if necessary). + if self.max_queue is not None: + while len(self.messages) >= self.max_queue: + self._put_message_waiter = self.loop.create_future() + try: + await asyncio.shield(self._put_message_waiter) + finally: + self._put_message_waiter = None + + # Put the message in the queue. + self.messages.append(message) + + # Notify recv(). + if self._pop_message_waiter is not None: + self._pop_message_waiter.set_result(None) + self._pop_message_waiter = None + + except asyncio.CancelledError as exc: + self.transfer_data_exc = exc + # If fail_connection() cancels this task, avoid logging the error + # twice and failing the connection again. + raise + + except ProtocolError as exc: + self.transfer_data_exc = exc + self.fail_connection(1002) + + except (ConnectionError, TimeoutError, EOFError, ssl.SSLError) as exc: + # Reading data with self.reader.readexactly may raise: + # - most subclasses of ConnectionError if the TCP connection + # breaks, is reset, or is aborted; + # - TimeoutError if the TCP connection times out; + # - IncompleteReadError, a subclass of EOFError, if fewer + # bytes are available than requested; + # - ssl.SSLError if the other side infringes the TLS protocol. 
+            self.transfer_data_exc = exc
+            self.fail_connection(1006)
+
+        except UnicodeDecodeError as exc:
+            self.transfer_data_exc = exc
+            self.fail_connection(1007)
+
+        except PayloadTooBig as exc:
+            self.transfer_data_exc = exc
+            self.fail_connection(1009)
+
+        except Exception as exc:
+            # This shouldn't happen often because exceptions expected under
+            # regular circumstances are handled above. If it does, consider
+            # catching and handling more exceptions.
+            self.logger.error("data transfer failed", exc_info=True)
+
+            self.transfer_data_exc = exc
+            self.fail_connection(1011)
+
+    async def read_message(self) -> Optional[Data]:
+        """
+        Read a single message from the connection.
+
+        Re-assemble data frames if the message is fragmented.
+
+        Return :obj:`None` when the closing handshake is started.
+
+        """
+        frame = await self.read_data_frame(max_size=self.max_size)
+
+        # A close frame was received.
+        if frame is None:
+            return None
+
+        if frame.opcode == OP_TEXT:
+            text = True
+        elif frame.opcode == OP_BINARY:
+            text = False
+        else:  # frame.opcode == OP_CONT
+            raise ProtocolError("unexpected opcode")
+
+        # Shortcut for the common case - no fragmentation
+        if frame.fin:
+            return frame.data.decode("utf-8") if text else frame.data
+
+        # 5.4. Fragmentation
+        chunks: List[Data] = []
+        max_size = self.max_size
+        if text:
+            decoder_factory = codecs.getincrementaldecoder("utf-8")
+            decoder = decoder_factory(errors="strict")
+            if max_size is None:
+
+                def append(frame: Frame) -> None:
+                    nonlocal chunks
+                    chunks.append(decoder.decode(frame.data, frame.fin))
+
+            else:
+
+                def append(frame: Frame) -> None:
+                    nonlocal chunks, max_size
+                    chunks.append(decoder.decode(frame.data, frame.fin))
+                    assert isinstance(max_size, int)
+                    max_size -= len(frame.data)
+
+        else:
+            if max_size is None:
+
+                def append(frame: Frame) -> None:
+                    nonlocal chunks
+                    chunks.append(frame.data)
+
+            else:
+
+                def append(frame: Frame) -> None:
+                    nonlocal chunks, max_size
+                    chunks.append(frame.data)
+                    assert isinstance(max_size, int)
+                    max_size -= len(frame.data)
+
+        append(frame)
+
+        while not frame.fin:
+            frame = await self.read_data_frame(max_size=max_size)
+            if frame is None:
+                raise ProtocolError("incomplete fragmented message")
+            if frame.opcode != OP_CONT:
+                raise ProtocolError("unexpected opcode")
+            append(frame)
+
+        return ("" if text else b"").join(chunks)
+
+    async def read_data_frame(self, max_size: Optional[int]) -> Optional[Frame]:
+        """
+        Read a single data frame from the connection.
+
+        Process control frames received before the next data frame.
+
+        Return :obj:`None` if a close frame is encountered before any data frame.
+
+        """
+        # 6.2. Receiving Data
+        while True:
+            frame = await self.read_frame(max_size)
+
+            # 5.5. Control Frames
+            if frame.opcode == OP_CLOSE:
+                # 7.1.5. The WebSocket Connection Close Code
+                # 7.1.6. The WebSocket Connection Close Reason
+                self.close_rcvd = Close.parse(frame.data)
+                if self.close_sent is not None:
+                    self.close_rcvd_then_sent = False
+                try:
+                    # Echo the original data instead of re-serializing it with
+                    # Close.serialize() because that fails when the close frame
+                    # is empty and Close.parse() synthesizes a 1005 close code.
+                    await self.write_close_frame(self.close_rcvd, frame.data)
+                except ConnectionClosed:
+                    # Connection closed before we could echo the close frame.
+                    pass
+                return None
+
+            elif frame.opcode == OP_PING:
+                # Answer pings, unless connection is CLOSING.
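+                # (Editor's note: RFC 6455, section 5.5.3, requires a pong to
+                # echo the ping's payload, hence frame.data below.)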
+                if self.state is State.OPEN:
+                    try:
+                        await self.pong(frame.data)
+                    except ConnectionClosed:
+                        # Connection closed while draining write buffer.
+                        pass
+
+            elif frame.opcode == OP_PONG:
+                if frame.data in self.pings:
+                    # Sending a pong for only the most recent ping is legal.
+                    # Acknowledge all previous pings too in that case.
+                    ping_id = None
+                    ping_ids = []
+                    for ping_id, ping in self.pings.items():
+                        ping_ids.append(ping_id)
+                        if not ping.done():
+                            ping.set_result(None)
+                        if ping_id == frame.data:
+                            break
+                    else:  # pragma: no cover
+                        assert False, "ping_id is in self.pings"
+                    # Remove acknowledged pings from self.pings.
+                    for ping_id in ping_ids:
+                        del self.pings[ping_id]
+
+            # 5.6. Data Frames
+            else:
+                return frame
+
+    async def read_frame(self, max_size: Optional[int]) -> Frame:
+        """
+        Read a single frame from the connection.
+
+        """
+        frame = await Frame.read(
+            self.reader.readexactly,
+            mask=not self.is_client,
+            max_size=max_size,
+            extensions=self.extensions,
+        )
+        if self.debug:
+            self.logger.debug("< %s", frame)
+        return frame
+
+    def write_frame_sync(self, fin: bool, opcode: int, data: bytes) -> None:
+        frame = Frame(fin, Opcode(opcode), data)
+        if self.debug:
+            self.logger.debug("> %s", frame)
+        frame.write(
+            self.transport.write,
+            mask=self.is_client,
+            extensions=self.extensions,
+        )
+
+    async def drain(self) -> None:
+        try:
+            # drain() cannot be called concurrently by multiple coroutines:
+            # http://bugs.python.org/issue29930. Remove this lock when no
+            # version of Python where this bug exists is supported anymore.
+            async with self._drain_lock:
+                # Handle flow control automatically.
+                await self._drain()
+        except ConnectionError:
+            # Terminate the connection if the socket died.
+            self.fail_connection()
+            # Wait until the connection is closed to raise ConnectionClosed
+            # with the correct code and reason.
+            await self.ensure_open()
+
+    async def write_frame(
+        self, fin: bool, opcode: int, data: bytes, *, _state: int = State.OPEN
+    ) -> None:
+        # Defensive assertion for protocol compliance.
+        if self.state is not _state:  # pragma: no cover
+            raise InvalidState(
+                f"Cannot write to a WebSocket in the {self.state.name} state"
+            )
+        self.write_frame_sync(fin, opcode, data)
+        await self.drain()
+
+    async def write_close_frame(
+        self, close: Close, data: Optional[bytes] = None
+    ) -> None:
+        """
+        Write a close frame if and only if the connection state is OPEN.
+
+        This dedicated coroutine must be used for writing close frames to
+        ensure that at most one close frame is sent on a given connection.
+
+        """
+        # Test and set the connection state before sending the close frame to
+        # avoid sending two frames in case of concurrent calls.
+        if self.state is State.OPEN:
+            # 7.1.3. The WebSocket Closing Handshake is Started
+            self.state = State.CLOSING
+            if self.debug:
+                self.logger.debug("= connection is CLOSING")
+
+            self.close_sent = close
+            if self.close_rcvd is not None:
+                self.close_rcvd_then_sent = True
+            if data is None:
+                data = close.serialize()
+
+            # 7.1.2. Start the WebSocket Closing Handshake
+            await self.write_frame(True, OP_CLOSE, data, _state=State.CLOSING)
+
+    async def keepalive_ping(self) -> None:
+        """
+        Send a Ping frame and wait for a Pong frame at regular intervals.
+
+        This coroutine exits when the connection terminates and one of the
+        following happens:
+
+        - :meth:`ping` raises :exc:`ConnectionClosed`, or
+        - :meth:`close_connection` cancels :attr:`keepalive_ping_task`.
+ + """ + if self.ping_interval is None: + return + + try: + while True: + await asyncio.sleep( + self.ping_interval, + **loop_if_py_lt_38(self.loop), + ) + + # ping() raises CancelledError if the connection is closed, + # when close_connection() cancels self.keepalive_ping_task. + + # ping() raises ConnectionClosed if the connection is lost, + # when connection_lost() calls abort_pings(). + + self.logger.debug("% sending keepalive ping") + pong_waiter = await self.ping() + + if self.ping_timeout is not None: + try: + await asyncio.wait_for( + pong_waiter, + self.ping_timeout, + **loop_if_py_lt_38(self.loop), + ) + self.logger.debug("% received keepalive pong") + except asyncio.TimeoutError: + if self.debug: + self.logger.debug("! timed out waiting for keepalive pong") + self.fail_connection(1011, "keepalive ping timeout") + break + + # Remove this branch when dropping support for Python < 3.8 + # because CancelledError no longer inherits Exception. + except asyncio.CancelledError: + raise + + except ConnectionClosed: + pass + + except Exception: + self.logger.error("keepalive ping failed", exc_info=True) + + async def close_connection(self) -> None: + """ + 7.1.1. Close the WebSocket Connection + + When the opening handshake succeeds, :meth:`connection_open` starts + this coroutine in a task. It waits for the data transfer phase to + complete then it closes the TCP connection cleanly. + + When the opening handshake fails, :meth:`fail_connection` does the + same. There's no data transfer phase in that case. + + """ + try: + # Wait for the data transfer phase to complete. + if hasattr(self, "transfer_data_task"): + try: + await self.transfer_data_task + except asyncio.CancelledError: + pass + + # Cancel the keepalive ping task. + if hasattr(self, "keepalive_ping_task"): + self.keepalive_ping_task.cancel() + + # A client should wait for a TCP close from the server. + if self.is_client and hasattr(self, "transfer_data_task"): + if await self.wait_for_connection_lost(): + # Coverage marks this line as a partially executed branch. + # I supect a bug in coverage. Ignore it for now. + return # pragma: no cover + if self.debug: + self.logger.debug("! timed out waiting for TCP close") + + # Half-close the TCP connection if possible (when there's no TLS). + if self.transport.can_write_eof(): + if self.debug: + self.logger.debug("x half-closing TCP connection") + # write_eof() doesn't document which exceptions it raises. + # "[Errno 107] Transport endpoint is not connected" happens + # but it isn't completely clear under which circumstances. + # uvloop can raise RuntimeError here. + try: + self.transport.write_eof() + except (OSError, RuntimeError): # pragma: no cover + pass + + if await self.wait_for_connection_lost(): + # Coverage marks this line as a partially executed branch. + # I supect a bug in coverage. Ignore it for now. + return # pragma: no cover + if self.debug: + self.logger.debug("! timed out waiting for TCP close") + + finally: + # The try/finally ensures that the transport never remains open, + # even if this coroutine is canceled (for example). + await self.close_transport() + + async def close_transport(self) -> None: + """ + Close the TCP connection. + + """ + # If connection_lost() was called, the TCP connection is closed. + # However, if TLS is enabled, the transport still needs closing. + # Else asyncio complains: ResourceWarning: unclosed transport. + if self.connection_lost_waiter.done() and self.transport.is_closing(): + return + + # Close the TCP connection. 
+        if self.debug:
+            self.logger.debug("x closing TCP connection")
+        self.transport.close()
+
+        if await self.wait_for_connection_lost():
+            return
+        if self.debug:
+            self.logger.debug("! timed out waiting for TCP close")
+
+        # Abort the TCP connection. Buffers are discarded.
+        if self.debug:
+            self.logger.debug("x aborting TCP connection")
+        self.transport.abort()
+
+        # connection_lost() is called quickly after aborting.
+        # Coverage marks this line as a partially executed branch.
+        # I suspect a bug in coverage. Ignore it for now.
+        await self.wait_for_connection_lost()  # pragma: no cover
+
+    async def wait_for_connection_lost(self) -> bool:
+        """
+        Wait until the TCP connection is closed or ``self.close_timeout`` elapses.
+
+        Return :obj:`True` if the connection is closed and :obj:`False`
+        otherwise.
+
+        """
+        if not self.connection_lost_waiter.done():
+            try:
+                await asyncio.wait_for(
+                    asyncio.shield(self.connection_lost_waiter),
+                    self.close_timeout,
+                    **loop_if_py_lt_38(self.loop),
+                )
+            except asyncio.TimeoutError:
+                pass
+        # Re-check self.connection_lost_waiter.done() synchronously because
+        # connection_lost() could run between the moment the timeout occurs
+        # and the moment this coroutine resumes running.
+        return self.connection_lost_waiter.done()
+
+    def fail_connection(self, code: int = 1006, reason: str = "") -> None:
+        """
+        7.1.7. Fail the WebSocket Connection
+
+        This requires:
+
+        1. Stopping all processing of incoming data, which means cancelling
+           :attr:`transfer_data_task`. The close code will be 1006 unless a
+           close frame was received earlier.
+
+        2. Sending a close frame with an appropriate code if the opening
+           handshake succeeded and the other side is likely to process it.
+
+        3. Closing the connection. :meth:`close_connection` takes care of
+           this once :attr:`transfer_data_task` exits after being canceled.
+
+        (The specification describes these steps in the opposite order.)
+
+        """
+        if self.debug:
+            self.logger.debug("! failing connection with code %d", code)
+
+        # Cancel transfer_data_task if the opening handshake succeeded.
+        # cancel() is idempotent and ignored if the task is done already.
+        if hasattr(self, "transfer_data_task"):
+            self.transfer_data_task.cancel()
+
+        # Send a close frame when the state is OPEN (a close frame was already
+        # sent if it's CLOSING), except when failing the connection because of
+        # an error reading from or writing to the network.
+        # Don't send a close frame if the connection is broken.
+        if code != 1006 and self.state is State.OPEN:
+            close = Close(code, reason)
+
+            # Write the close frame without draining the write buffer.
+
+            # Keeping fail_connection() synchronous guarantees it can't
+            # get stuck and simplifies the implementation of the callers.
+            # Not draining the write buffer is acceptable in this context.
+
+            # This duplicates a few lines of code from write_close_frame().
+
+            self.state = State.CLOSING
+            if self.debug:
+                self.logger.debug("= connection is CLOSING")
+
+            # If self.close_rcvd was set, the connection state would be
+            # CLOSING. Therefore self.close_rcvd isn't set and we don't
+            # have to set self.close_rcvd_then_sent.
+            assert self.close_rcvd is None
+            self.close_sent = close
+
+            self.write_frame_sync(True, OP_CLOSE, close.serialize())
+
+        # Start close_connection_task if the opening handshake didn't succeed.
+ if not hasattr(self, "close_connection_task"): + self.close_connection_task = self.loop.create_task(self.close_connection()) + + def abort_pings(self) -> None: + """ + Raise ConnectionClosed in pending keepalive pings. + + They'll never receive a pong once the connection is closed. + + """ + assert self.state is State.CLOSED + exc = self.connection_closed_exc() + + for ping in self.pings.values(): + ping.set_exception(exc) + # If the exception is never retrieved, it will be logged when ping + # is garbage-collected. This is confusing for users. + # Given that ping is done (with an exception), canceling it does + # nothing, but it prevents logging the exception. + ping.cancel() + + # asyncio.Protocol methods + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + """ + Configure write buffer limits. + + The high-water limit is defined by ``self.write_limit``. + + The low-water limit currently defaults to ``self.write_limit // 4`` in + :meth:`~asyncio.WriteTransport.set_write_buffer_limits`, which should + be all right for reasonable use cases of this library. + + This is the earliest point where we can get hold of the transport, + which means it's the best point for configuring it. + + """ + transport = cast(asyncio.Transport, transport) + transport.set_write_buffer_limits(self.write_limit) + self.transport = transport + + # Copied from asyncio.StreamReaderProtocol + self.reader.set_transport(transport) + + def connection_lost(self, exc: Optional[Exception]) -> None: + """ + 7.1.4. The WebSocket Connection is Closed. + + """ + self.state = State.CLOSED + self.logger.debug("= connection is CLOSED") + + self.abort_pings() + + # If self.connection_lost_waiter isn't pending, that's a bug, because: + # - it's set only here in connection_lost() which is called only once; + # - it must never be canceled. + self.connection_lost_waiter.set_result(None) + + if True: # pragma: no cover + + # Copied from asyncio.StreamReaderProtocol + if self.reader is not None: + if exc is None: + self.reader.feed_eof() + else: + self.reader.set_exception(exc) + + # Copied from asyncio.FlowControlMixin + # Wake up the writer if currently paused. + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + return + self._drain_waiter = None + if waiter.done(): + return + if exc is None: + waiter.set_result(None) + else: + waiter.set_exception(exc) + + def pause_writing(self) -> None: # pragma: no cover + assert not self._paused + self._paused = True + + def resume_writing(self) -> None: # pragma: no cover + assert self._paused + self._paused = False + + waiter = self._drain_waiter + if waiter is not None: + self._drain_waiter = None + if not waiter.done(): + waiter.set_result(None) + + def data_received(self, data: bytes) -> None: + self.reader.feed_data(data) + + def eof_received(self) -> None: + """ + Close the transport after receiving EOF. + + The WebSocket protocol has its own closing handshake: endpoints close + the TCP or TLS connection after sending and receiving a close frame. + + As a consequence, they never need to write after receiving EOF, so + there's no reason to keep the transport open by returning :obj:`True`. + + Besides, that doesn't work on TLS connections. + + """ + self.reader.feed_eof() + + +def broadcast(websockets: Iterable[WebSocketCommonProtocol], message: Data) -> None: + """ + Broadcast a message to several WebSocket connections. + + A string (:class:`str`) is sent as a Text_ frame. 
A bytestring or + bytes-like object (:class:`bytes`, :class:`bytearray`, or + :class:`memoryview`) is sent as a Binary_ frame. + + .. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6 + .. _Binary: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6 + + :func:`broadcast` pushes the message synchronously to all connections even + if their write buffers are overflowing. There's no backpressure. + + :func:`broadcast` skips silently connections that aren't open in order to + avoid errors on connections where the closing handshake is in progress. + + If you broadcast messages faster than a connection can handle them, + messages will pile up in its write buffer until the connection times out. + Keep low values for ``ping_interval`` and ``ping_timeout`` to prevent + excessive memory usage by slow connections when you use :func:`broadcast`. + + Unlike :meth:`~websockets.server.WebSocketServerProtocol.send`, + :func:`broadcast` doesn't support sending fragmented messages. Indeed, + fragmentation is useful for sending large messages without buffering + them in memory, while :func:`broadcast` buffers one copy per connection + as fast as possible. + + Args: + websockets (Iterable[WebSocketCommonProtocol]): WebSocket connections + to which the message will be sent. + message (Data): message to send. + + Raises: + RuntimeError: if a connection is busy sending a fragmented message. + TypeError: if ``message`` doesn't have a supported type. + + """ + if not isinstance(message, (str, bytes, bytearray, memoryview)): + raise TypeError("data must be str or bytes-like") + + opcode, data = prepare_data(message) + + for websocket in websockets: + if websocket.state is not State.OPEN: + continue + + if websocket._fragmented_message_waiter is not None: + raise RuntimeError("busy sending a fragmented message") + + websocket.write_frame_sync(True, opcode, data) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/server.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/server.py new file mode 100644 index 00000000..3e51db1b --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/legacy/server.py @@ -0,0 +1,1156 @@ +from __future__ import annotations + +import asyncio +import email.utils +import functools +import http +import inspect +import logging +import socket +import warnings +from types import TracebackType +from typing import ( + Any, + Awaitable, + Callable, + Generator, + Iterable, + List, + Optional, + Sequence, + Set, + Tuple, + Type, + Union, + cast, +) + +from ..connection import State +from ..datastructures import Headers, HeadersLike, MultipleValuesError +from ..exceptions import ( + AbortHandshake, + InvalidHandshake, + InvalidHeader, + InvalidMessage, + InvalidOrigin, + InvalidUpgrade, + NegotiationError, +) +from ..extensions import Extension, ServerExtensionFactory +from ..extensions.permessage_deflate import enable_server_permessage_deflate +from ..headers import ( + build_extension, + parse_extension, + parse_subprotocol, + validate_subprotocols, +) +from ..http import USER_AGENT +from ..typing import ExtensionHeader, LoggerLike, Origin, Subprotocol +from .compatibility import loop_if_py_lt_38 +from .handshake import build_response, check_request +from .http import read_request +from .protocol import WebSocketCommonProtocol + + +__all__ = ["serve", "unix_serve", "WebSocketServerProtocol", "WebSocketServer"] + + +HeadersLikeOrCallable = Union[HeadersLike, Callable[[str, Headers], 
HeadersLike]] + +HTTPResponse = Tuple[http.HTTPStatus, HeadersLike, bytes] + + +class WebSocketServerProtocol(WebSocketCommonProtocol): + """ + WebSocket server connection. + + :class:`WebSocketServerProtocol` provides :meth:`recv` and :meth:`send` + coroutines for receiving and sending messages. + + It supports asynchronous iteration to receive messages:: + + async for message in websocket: + await process(message) + + The iterator exits normally when the connection is closed with close code + 1000 (OK) or 1001 (going away). It raises + a :exc:`~websockets.exceptions.ConnectionClosedError` when the connection + is closed with any other code. + + You may customize the opening handshake in a subclass by + overriding :meth:`process_request` or :meth:`select_subprotocol`. + + Args: + ws_server: WebSocket server that created this connection. + + See :func:`serve` for the documentation of ``ws_handler``, ``logger``, ``origins``, + ``extensions``, ``subprotocols``, and ``extra_headers``. + + See :class:`~websockets.legacy.protocol.WebSocketCommonProtocol` for the + documentation of ``ping_interval``, ``ping_timeout``, ``close_timeout``, + ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit``. + + """ + + is_client = False + side = "server" + + def __init__( + self, + ws_handler: Union[ + Callable[[WebSocketServerProtocol], Awaitable[Any]], + Callable[[WebSocketServerProtocol, str], Awaitable[Any]], # deprecated + ], + ws_server: WebSocketServer, + *, + logger: Optional[LoggerLike] = None, + origins: Optional[Sequence[Optional[Origin]]] = None, + extensions: Optional[Sequence[ServerExtensionFactory]] = None, + subprotocols: Optional[Sequence[Subprotocol]] = None, + extra_headers: Optional[HeadersLikeOrCallable] = None, + process_request: Optional[ + Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]] + ] = None, + select_subprotocol: Optional[ + Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol] + ] = None, + **kwargs: Any, + ) -> None: + if logger is None: + logger = logging.getLogger("websockets.server") + super().__init__(logger=logger, **kwargs) + # For backwards compatibility with 6.0 or earlier. + if origins is not None and "" in origins: + warnings.warn("use None instead of '' in origins", DeprecationWarning) + origins = [None if origin == "" else origin for origin in origins] + # For backwards compatibility with 10.0 or earlier. Done here in + # addition to serve to trigger the deprecation warning on direct + # use of WebSocketServerProtocol. + self.ws_handler = remove_path_argument(ws_handler) + self.ws_server = ws_server + self.origins = origins + self.available_extensions = extensions + self.available_subprotocols = subprotocols + self.extra_headers = extra_headers + self._process_request = process_request + self._select_subprotocol = select_subprotocol + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + """ + Register connection and initialize a task to handle it. + + """ + super().connection_made(transport) + # Register the connection with the server before creating the handler + # task. Registering at the beginning of the handler coroutine would + # create a race condition between the creation of the task, which + # schedules its execution, and the moment the handler starts running. + self.ws_server.register(self) + self.handler_task = self.loop.create_task(self.handler()) + + async def handler(self) -> None: + """ + Handle the lifecycle of a WebSocket connection. 
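Taken together with the async-iteration contract documented above, a typical connection handler consumes messages in a loop and keeps a registry of live connections for use with broadcast(), defined earlier in this file. A minimal sketch; CONNECTIONS, echo, and notify_all are hypothetical names:

import websockets

CONNECTIONS = set()

async def echo(websocket):
    # Register the connection, then consume messages until the peer closes.
    CONNECTIONS.add(websocket)
    try:
        async for message in websocket:
            await websocket.send(message)
    finally:
        CONNECTIONS.remove(websocket)

def notify_all(message: str) -> None:
    # broadcast() writes frames synchronously and skips connections that
    # aren't open; there is no backpressure.
    websockets.broadcast(CONNECTIONS, message)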
+
+        Since this method doesn't have a caller able to handle exceptions, it
+        attempts to log relevant ones and guarantees that the TCP connection is
+        closed before exiting.
+
+        """
+        try:
+
+            try:
+                await self.handshake(
+                    origins=self.origins,
+                    available_extensions=self.available_extensions,
+                    available_subprotocols=self.available_subprotocols,
+                    extra_headers=self.extra_headers,
+                )
+            # Remove this branch when dropping support for Python < 3.8
+            # because CancelledError no longer inherits Exception.
+            except asyncio.CancelledError:  # pragma: no cover
+                raise
+            except ConnectionError:
+                raise
+            except Exception as exc:
+                if isinstance(exc, AbortHandshake):
+                    status, headers, body = exc.status, exc.headers, exc.body
+                elif isinstance(exc, InvalidOrigin):
+                    if self.debug:
+                        self.logger.debug("! invalid origin", exc_info=True)
+                    status, headers, body = (
+                        http.HTTPStatus.FORBIDDEN,
+                        Headers(),
+                        f"Failed to open a WebSocket connection: {exc}.\n".encode(),
+                    )
+                elif isinstance(exc, InvalidUpgrade):
+                    if self.debug:
+                        self.logger.debug("! invalid upgrade", exc_info=True)
+                    status, headers, body = (
+                        http.HTTPStatus.UPGRADE_REQUIRED,
+                        Headers([("Upgrade", "websocket")]),
+                        (
+                            f"Failed to open a WebSocket connection: {exc}.\n"
+                            f"\n"
+                            f"You cannot access a WebSocket server directly "
+                            f"with a browser. You need a WebSocket client.\n"
+                        ).encode(),
+                    )
+                elif isinstance(exc, InvalidHandshake):
+                    if self.debug:
+                        self.logger.debug("! invalid handshake", exc_info=True)
+                    status, headers, body = (
+                        http.HTTPStatus.BAD_REQUEST,
+                        Headers(),
+                        f"Failed to open a WebSocket connection: {exc}.\n".encode(),
+                    )
+                else:
+                    self.logger.error("opening handshake failed", exc_info=True)
+                    status, headers, body = (
+                        http.HTTPStatus.INTERNAL_SERVER_ERROR,
+                        Headers(),
+                        (
+                            b"Failed to open a WebSocket connection.\n"
+                            b"See server log for more information.\n"
+                        ),
+                    )
+
+                headers.setdefault("Date", email.utils.formatdate(usegmt=True))
+                headers.setdefault("Server", USER_AGENT)
+                headers.setdefault("Content-Length", str(len(body)))
+                headers.setdefault("Content-Type", "text/plain")
+                headers.setdefault("Connection", "close")
+
+                self.write_http_response(status, headers, body)
+                self.logger.info(
+                    "connection failed (%d %s)", status.value, status.phrase
+                )
+                await self.close_transport()
+                return
+
+            try:
+                await self.ws_handler(self)
+            except Exception:
+                self.logger.error("connection handler failed", exc_info=True)
+                if not self.closed:
+                    self.fail_connection(1011)
+                raise
+
+            try:
+                await self.close()
+            except ConnectionError:
+                raise
+            except Exception:
+                self.logger.error("closing handshake failed", exc_info=True)
+                raise
+
+        except Exception:
+            # Last-ditch attempt to avoid leaking connections on errors.
+            try:
+                self.transport.close()
+            except Exception:  # pragma: no cover
+                pass
+
+        finally:
+            # Unregister the connection with the server when the handler task
+            # terminates. Registration is tied to the lifecycle of the handler
+            # task because the server waits for tasks attached to registered
+            # connections before terminating.
+            self.ws_server.unregister(self)
+            self.logger.info("connection closed")
+
+    async def read_http_request(self) -> Tuple[str, Headers]:
+        """
+        Read request line and headers from the HTTP request.
+
+        If the request contains a body, it may be read from ``self.reader``
+        after this coroutine returns.
+
+        Raises:
+            InvalidMessage: if the HTTP message is malformed or isn't an
+                HTTP/1.1 GET request.
+
+        """
+        try:
+            path, headers = await read_request(self.reader)
+        except asyncio.CancelledError:  # pragma: no cover
+            raise
+        except Exception as exc:
+            raise InvalidMessage("did not receive a valid HTTP request") from exc
+
+        if self.debug:
+            self.logger.debug("< GET %s HTTP/1.1", path)
+            for key, value in headers.raw_items():
+                self.logger.debug("< %s: %s", key, value)
+
+        self.path = path
+        self.request_headers = headers
+
+        return path, headers
+
+    def write_http_response(
+        self, status: http.HTTPStatus, headers: Headers, body: Optional[bytes] = None
+    ) -> None:
+        """
+        Write status line and headers to the HTTP response.
+
+        This method is also able to write a response body.
+
+        """
+        self.response_headers = headers
+
+        if self.debug:
+            self.logger.debug("> HTTP/1.1 %d %s", status.value, status.phrase)
+            for key, value in headers.raw_items():
+                self.logger.debug("> %s: %s", key, value)
+            if body is not None:
+                self.logger.debug("> [body] (%d bytes)", len(body))
+
+        # Since the status line and headers only contain ASCII characters,
+        # we can keep this simple.
+        response = f"HTTP/1.1 {status.value} {status.phrase}\r\n"
+        response += str(headers)
+
+        self.transport.write(response.encode())
+
+        if body is not None:
+            self.transport.write(body)
+
+    async def process_request(
+        self, path: str, request_headers: Headers
+    ) -> Optional[HTTPResponse]:
+        """
+        Intercept the HTTP request and return an HTTP response if appropriate.
+
+        You may override this method in a :class:`WebSocketServerProtocol`
+        subclass, for example:
+
+        * to return an HTTP 200 OK response on a given path; then a load
+          balancer can use this path for a health check;
+        * to authenticate the request and return an HTTP 401 Unauthorized or
+          an HTTP 403 Forbidden when authentication fails.
+
+        You may also override this method with the ``process_request``
+        argument of :func:`serve` and :class:`WebSocketServerProtocol`. This
+        is equivalent, except ``process_request`` won't have access to the
+        protocol instance, so it can't store information for later use.
+
+        :meth:`process_request` is expected to complete quickly. If it may run
+        for a long time, then it should await :meth:`wait_closed` and exit if
+        :meth:`wait_closed` completes, or else it could prevent the server
+        from shutting down.
+
+        Args:
+            path: request path, including optional query string.
+            request_headers: request headers.
+
+        Returns:
+            Optional[Tuple[http.HTTPStatus, HeadersLike, bytes]]: :obj:`None`
+            to continue the WebSocket handshake normally.
+
+            An HTTP response, represented by a 3-tuple of the response status,
+            headers, and body, to abort the WebSocket handshake and return
+            that HTTP response instead.
+
+        """
+        if self._process_request is not None:
+            response = self._process_request(path, request_headers)
+            if isinstance(response, Awaitable):
+                return await response
+            else:
+                # For backwards compatibility with 7.0.
+                warnings.warn(
+                    "declare process_request as a coroutine", DeprecationWarning
+                )
+                return response
+        return None
+
+    @staticmethod
+    def process_origin(
+        headers: Headers, origins: Optional[Sequence[Optional[Origin]]] = None
+    ) -> Optional[Origin]:
+        """
+        Handle the Origin HTTP request header.
+
+        Args:
+            headers: request headers.
+            origins: optional list of acceptable origins.
+
+        Raises:
+            InvalidOrigin: if the origin isn't acceptable.
+
+        """
+        # "The user agent MUST NOT include more than one Origin header field"
+        # per https://www.rfc-editor.org/rfc/rfc6454.html#section-7.3.
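As a concrete illustration of the process_request hook described above, a health-check interceptor might look like the following sketch. The /healthz path is a hypothetical choice, and the callable would be passed as the process_request argument of serve():

import http

async def health_check(path, request_headers):
    # Short-circuit the handshake for load-balancer probes.
    if path == "/healthz":
        return http.HTTPStatus.OK, [], b"OK\n"
    return None  # fall through to the normal WebSocket handshake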
+ try: + origin = cast(Optional[Origin], headers.get("Origin")) + except MultipleValuesError as exc: + raise InvalidHeader("Origin", "more than one Origin header found") from exc + if origins is not None: + if origin not in origins: + raise InvalidOrigin(origin) + return origin + + @staticmethod + def process_extensions( + headers: Headers, + available_extensions: Optional[Sequence[ServerExtensionFactory]], + ) -> Tuple[Optional[str], List[Extension]]: + """ + Handle the Sec-WebSocket-Extensions HTTP request header. + + Accept or reject each extension proposed in the client request. + Negotiate parameters for accepted extensions. + + Return the Sec-WebSocket-Extensions HTTP response header and the list + of accepted extensions. + + :rfc:`6455` leaves the rules up to the specification of each + :extension. + + To provide this level of flexibility, for each extension proposed by + the client, we check for a match with each extension available in the + server configuration. If no match is found, the extension is ignored. + + If several variants of the same extension are proposed by the client, + it may be accepted several times, which won't make sense in general. + Extensions must implement their own requirements. For this purpose, + the list of previously accepted extensions is provided. + + This process doesn't allow the server to reorder extensions. It can + only select a subset of the extensions proposed by the client. + + Other requirements, for example related to mandatory extensions or the + order of extensions, may be implemented by overriding this method. + + Args: + headers: request headers. + extensions: optional list of supported extensions. + + Raises: + InvalidHandshake: to abort the handshake with an HTTP 400 error. + + """ + response_header_value: Optional[str] = None + + extension_headers: List[ExtensionHeader] = [] + accepted_extensions: List[Extension] = [] + + header_values = headers.get_all("Sec-WebSocket-Extensions") + + if header_values and available_extensions: + + parsed_header_values: List[ExtensionHeader] = sum( + [parse_extension(header_value) for header_value in header_values], [] + ) + + for name, request_params in parsed_header_values: + + for ext_factory in available_extensions: + + # Skip non-matching extensions based on their name. + if ext_factory.name != name: + continue + + # Skip non-matching extensions based on their params. + try: + response_params, extension = ext_factory.process_request_params( + request_params, accepted_extensions + ) + except NegotiationError: + continue + + # Add matching extension to the final list. + extension_headers.append((name, response_params)) + accepted_extensions.append(extension) + + # Break out of the loop once we have a match. + break + + # If we didn't break from the loop, no extension in our list + # matched what the client sent. The extension is declined. + + # Serialize extension header. + if extension_headers: + response_header_value = build_extension(extension_headers) + + return response_header_value, accepted_extensions + + # Not @staticmethod because it calls self.select_subprotocol() + def process_subprotocol( + self, headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]] + ) -> Optional[Subprotocol]: + """ + Handle the Sec-WebSocket-Protocol HTTP request header. + + Return Sec-WebSocket-Protocol HTTP response header, which is the same + as the selected subprotocol. + + Args: + headers: request headers. + available_subprotocols: optional list of supported subprotocols. 
+ + Raises: + InvalidHandshake: to abort the handshake with an HTTP 400 error. + + """ + subprotocol: Optional[Subprotocol] = None + + header_values = headers.get_all("Sec-WebSocket-Protocol") + + if header_values and available_subprotocols: + + parsed_header_values: List[Subprotocol] = sum( + [parse_subprotocol(header_value) for header_value in header_values], [] + ) + + subprotocol = self.select_subprotocol( + parsed_header_values, available_subprotocols + ) + + return subprotocol + + def select_subprotocol( + self, + client_subprotocols: Sequence[Subprotocol], + server_subprotocols: Sequence[Subprotocol], + ) -> Optional[Subprotocol]: + """ + Pick a subprotocol among those offered by the client. + + If several subprotocols are supported by the client and the server, + the default implementation selects the preferred subprotocol by + giving equal value to the priorities of the client and the server. + If no subprotocol is supported by the client and the server, it + proceeds without a subprotocol. + + This is unlikely to be the most useful implementation in practice. + Many servers providing a subprotocol will require that the client + uses that subprotocol. Such rules can be implemented in a subclass. + + You may also override this method with the ``select_subprotocol`` + argument of :func:`serve` and :class:`WebSocketServerProtocol`. + + Args: + client_subprotocols: list of subprotocols offered by the client. + server_subprotocols: list of subprotocols available on the server. + + Returns: + Optional[Subprotocol]: Selected subprotocol. + + :obj:`None` to continue without a subprotocol. + + + """ + if self._select_subprotocol is not None: + return self._select_subprotocol(client_subprotocols, server_subprotocols) + + subprotocols = set(client_subprotocols) & set(server_subprotocols) + if not subprotocols: + return None + priority = lambda p: ( + client_subprotocols.index(p) + server_subprotocols.index(p) + ) + return sorted(subprotocols, key=priority)[0] + + async def handshake( + self, + origins: Optional[Sequence[Optional[Origin]]] = None, + available_extensions: Optional[Sequence[ServerExtensionFactory]] = None, + available_subprotocols: Optional[Sequence[Subprotocol]] = None, + extra_headers: Optional[HeadersLikeOrCallable] = None, + ) -> str: + """ + Perform the server side of the opening handshake. + + Args: + origins: list of acceptable values of the Origin HTTP header; + include :obj:`None` if the lack of an origin is acceptable. + extensions: list of supported extensions, in order in which they + should be tried. + subprotocols: list of supported subprotocols, in order of + decreasing preference. + extra_headers: arbitrary HTTP headers to add to the response when + the handshake succeeds. + + Returns: + str: path of the URI of the request. + + Raises: + InvalidHandshake: if the handshake fails. + + """ + path, request_headers = await self.read_http_request() + + # Hook for customizing request handling, for example checking + # authentication or treating some paths as plain HTTP endpoints. + early_response_awaitable = self.process_request(path, request_headers) + if isinstance(early_response_awaitable, Awaitable): + early_response = await early_response_awaitable + else: + # For backwards compatibility with 7.0. + warnings.warn("declare process_request as a coroutine", DeprecationWarning) + early_response = early_response_awaitable + + # The connection may drop while process_request is running. 
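Where a server wants to impose its own preference order rather than the combined-priority default shown above, an override along these lines can be passed via the select_subprotocol argument of serve(). A sketch, assuming the same argument order as select_subprotocol():

def server_preferred(client_subprotocols, server_subprotocols):
    # Honor the server's ordering; take the first subprotocol the
    # client also offered.
    for subprotocol in server_subprotocols:
        if subprotocol in client_subprotocols:
            return subprotocol
    return None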
+ if self.state is State.CLOSED: + raise self.connection_closed_exc() # pragma: no cover + + # Change the response to a 503 error if the server is shutting down. + if not self.ws_server.is_serving(): + early_response = ( + http.HTTPStatus.SERVICE_UNAVAILABLE, + [], + b"Server is shutting down.\n", + ) + + if early_response is not None: + raise AbortHandshake(*early_response) + + key = check_request(request_headers) + + self.origin = self.process_origin(request_headers, origins) + + extensions_header, self.extensions = self.process_extensions( + request_headers, available_extensions + ) + + protocol_header = self.subprotocol = self.process_subprotocol( + request_headers, available_subprotocols + ) + + response_headers = Headers() + + build_response(response_headers, key) + + if extensions_header is not None: + response_headers["Sec-WebSocket-Extensions"] = extensions_header + + if protocol_header is not None: + response_headers["Sec-WebSocket-Protocol"] = protocol_header + + if callable(extra_headers): + extra_headers = extra_headers(path, self.request_headers) + if extra_headers is not None: + response_headers.update(extra_headers) + + response_headers.setdefault("Date", email.utils.formatdate(usegmt=True)) + response_headers.setdefault("Server", USER_AGENT) + + self.write_http_response(http.HTTPStatus.SWITCHING_PROTOCOLS, response_headers) + + self.logger.info("connection open") + + self.connection_open() + + return path + + +class WebSocketServer: + """ + WebSocket server returned by :func:`serve`. + + This class provides the same interface as :class:`~asyncio.Server`, + notably the :meth:`~asyncio.Server.close` + and :meth:`~asyncio.Server.wait_closed` methods. + + It keeps track of WebSocket connections in order to close them properly + when shutting down. + + Args: + logger: logger for this server; + defaults to ``logging.getLogger("websockets.server")``; + see the :doc:`logging guide <../topics/logging>` for details. + + """ + + def __init__(self, logger: Optional[LoggerLike] = None): + if logger is None: + logger = logging.getLogger("websockets.server") + self.logger = logger + + # Keep track of active connections. + self.websockets: Set[WebSocketServerProtocol] = set() + + # Task responsible for closing the server and terminating connections. + self.close_task: Optional[asyncio.Task[None]] = None + + # Completed when the server is closed and connections are terminated. + self.closed_waiter: asyncio.Future[None] + + def wrap(self, server: asyncio.base_events.Server) -> None: + """ + Attach to a given :class:`~asyncio.Server`. + + Since :meth:`~asyncio.loop.create_server` doesn't support injecting a + custom ``Server`` class, the easiest solution that doesn't rely on + private :mod:`asyncio` APIs is to: + + - instantiate a :class:`WebSocketServer` + - give the protocol factory a reference to that instance + - call :meth:`~asyncio.loop.create_server` with the factory + - attach the resulting :class:`~asyncio.Server` with this method + + """ + self.server = server + for sock in server.sockets: + if sock.family == socket.AF_INET: + name = "%s:%d" % sock.getsockname() + elif sock.family == socket.AF_INET6: + name = "[%s]:%d" % sock.getsockname()[:2] + elif sock.family == socket.AF_UNIX: + name = sock.getsockname() + # In the unlikely event that someone runs websockets over a + # protocol other than IP or Unix sockets, avoid crashing. 
+ else: # pragma: no cover + name = str(sock.getsockname()) + self.logger.info("server listening on %s", name) + + # Initialized here because we need a reference to the event loop. + # This should be moved back to __init__ in Python 3.10. + self.closed_waiter = server.get_loop().create_future() + + def register(self, protocol: WebSocketServerProtocol) -> None: + """ + Register a connection with this server. + + """ + self.websockets.add(protocol) + + def unregister(self, protocol: WebSocketServerProtocol) -> None: + """ + Unregister a connection with this server. + + """ + self.websockets.remove(protocol) + + def close(self) -> None: + """ + Close the server. + + This method: + + * closes the underlying :class:`~asyncio.Server`; + * rejects new WebSocket connections with an HTTP 503 (service + unavailable) error; this happens when the server accepted the TCP + connection but didn't complete the WebSocket opening handshake prior + to closing; + * closes open WebSocket connections with close code 1001 (going away). + + :meth:`close` is idempotent. + + """ + if self.close_task is None: + self.close_task = self.get_loop().create_task(self._close()) + + async def _close(self) -> None: + """ + Implementation of :meth:`close`. + + This calls :meth:`~asyncio.Server.close` on the underlying + :class:`~asyncio.Server` object to stop accepting new connections and + then closes open connections with close code 1001. + + """ + self.logger.info("server closing") + + # Stop accepting new connections. + self.server.close() + + # Wait until self.server.close() completes. + await self.server.wait_closed() + + # Wait until all accepted connections reach connection_made() and call + # register(). See https://bugs.python.org/issue34852 for details. + await asyncio.sleep(0, **loop_if_py_lt_38(self.get_loop())) + + # Close OPEN connections with status code 1001. Since the server was + # closed, handshake() closes OPENING connections with a HTTP 503 + # error. Wait until all connections are closed. + + close_tasks = [ + asyncio.create_task(websocket.close(1001)) + for websocket in self.websockets + if websocket.state is not State.CONNECTING + ] + # asyncio.wait doesn't accept an empty first argument. + if close_tasks: + await asyncio.wait( + close_tasks, + **loop_if_py_lt_38(self.get_loop()), + ) + + # Wait until all connection handlers are complete. + + # asyncio.wait doesn't accept an empty first argument. + if self.websockets: + await asyncio.wait( + [websocket.handler_task for websocket in self.websockets], + **loop_if_py_lt_38(self.get_loop()), + ) + + # Tell wait_closed() to return. + self.closed_waiter.set_result(None) + + self.logger.info("server closed") + + async def wait_closed(self) -> None: + """ + Wait until the server is closed. + + When :meth:`wait_closed` returns, all TCP connections are closed and + all connection handlers have returned. + + To ensure a fast shutdown, a connection handler should always be + awaiting at least one of: + + * :meth:`~WebSocketServerProtocol.recv`: when the connection is closed, + it raises :exc:`~websockets.exceptions.ConnectionClosedOK`; + * :meth:`~WebSocketServerProtocol.wait_closed`: when the connection is + closed, it returns. + + Then the connection handler is immediately notified of the shutdown; + it can clean up and exit. + + """ + await asyncio.shield(self.closed_waiter) + + def get_loop(self) -> asyncio.AbstractEventLoop: + """ + See :meth:`asyncio.Server.get_loop`. 
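The close()/wait_closed() pair documented above supports signal-driven shutdown. A sketch, assuming a Unix platform (add_signal_handler is not available on Windows event loops) and a server_instance obtained by awaiting serve():

import asyncio
import signal

async def shutdown_on_sigterm(server_instance):
    loop = asyncio.get_running_loop()
    stop = loop.create_future()
    loop.add_signal_handler(signal.SIGTERM, stop.set_result, None)
    await stop
    server_instance.close()              # stop accepting; close with 1001
    await server_instance.wait_closed()  # all handler tasks have returned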
+ + """ + return self.server.get_loop() + + def is_serving(self) -> bool: + """ + See :meth:`asyncio.Server.is_serving`. + + """ + return self.server.is_serving() + + async def start_serving(self) -> None: + """ + See :meth:`asyncio.Server.start_serving`. + + """ + await self.server.start_serving() # pragma: no cover + + async def serve_forever(self) -> None: + """ + See :meth:`asyncio.Server.serve_forever`. + + """ + await self.server.serve_forever() # pragma: no cover + + @property + def sockets(self) -> Iterable[socket.socket]: + """ + See :attr:`asyncio.Server.sockets`. + + """ + return self.server.sockets + + async def __aenter__(self) -> WebSocketServer: + return self # pragma: no cover + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + self.close() # pragma: no cover + await self.wait_closed() # pragma: no cover + + +class Serve: + """ + Start a WebSocket server listening on ``host`` and ``port``. + + Whenever a client connects, the server creates a + :class:`WebSocketServerProtocol`, performs the opening handshake, and + delegates to the connection handler, ``ws_handler``. + + The handler receives the :class:`WebSocketServerProtocol` and uses it to + send and receive messages. + + Once the handler completes, either normally or with an exception, the + server performs the closing handshake and closes the connection. + + Awaiting :func:`serve` yields a :class:`WebSocketServer`. This object + provides :meth:`~WebSocketServer.close` and + :meth:`~WebSocketServer.wait_closed` methods for shutting down the server. + + :func:`serve` can be used as an asynchronous context manager:: + + stop = asyncio.Future() # set this future to exit the server + + async with serve(...): + await stop + + The server is shut down automatically when exiting the context. + + Args: + ws_handler: connection handler. It receives the WebSocket connection, + which is a :class:`WebSocketServerProtocol`, in argument. + host: network interfaces the server is bound to; + see :meth:`~asyncio.loop.create_server` for details. + port: TCP port the server listens on; + see :meth:`~asyncio.loop.create_server` for details. + create_protocol: factory for the :class:`asyncio.Protocol` managing + the connection; defaults to :class:`WebSocketServerProtocol`; may + be set to a wrapper or a subclass to customize connection handling. + logger: logger for this server; + defaults to ``logging.getLogger("websockets.server")``; + see the :doc:`logging guide <../topics/logging>` for details. + compression: shortcut that enables the "permessage-deflate" extension + by default; may be set to :obj:`None` to disable compression; + see the :doc:`compression guide <../topics/compression>` for details. + origins: acceptable values of the ``Origin`` header; include + :obj:`None` in the list if the lack of an origin is acceptable. + This is useful for defending against Cross-Site WebSocket + Hijacking attacks. + extensions: list of supported extensions, in order in which they + should be tried. + subprotocols: list of supported subprotocols, in order of decreasing + preference. + extra_headers (Union[HeadersLike, Callable[[str, Headers], HeadersLike]]): + arbitrary HTTP headers to add to the request; this can be + a :data:`~websockets.datastructures.HeadersLike` or a callable + taking the request path and headers in arguments and returning + a :data:`~websockets.datastructures.HeadersLike`. 
+        process_request (Optional[Callable[[str, Headers], \
+            Awaitable[Optional[Tuple[http.HTTPStatus, HeadersLike, bytes]]]]]):
+            intercept HTTP request before the opening handshake;
+            see :meth:`~WebSocketServerProtocol.process_request` for details.
+        select_subprotocol: select a subprotocol supported by the client;
+            see :meth:`~WebSocketServerProtocol.select_subprotocol` for details.
+
+    See :class:`~websockets.legacy.protocol.WebSocketCommonProtocol` for the
+    documentation of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
+    ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit``.
+
+    Any other keyword arguments are passed to the event loop's
+    :meth:`~asyncio.loop.create_server` method.
+
+    For example:
+
+    * You can set ``ssl`` to a :class:`~ssl.SSLContext` to enable TLS.
+
+    * You can set ``sock`` to a :obj:`~socket.socket` that you created
+      outside of websockets.
+
+    Returns:
+        WebSocketServer: WebSocket server.
+
+    """
+
+    def __init__(
+        self,
+        ws_handler: Union[
+            Callable[[WebSocketServerProtocol], Awaitable[Any]],
+            Callable[[WebSocketServerProtocol, str], Awaitable[Any]],  # deprecated
+        ],
+        host: Optional[Union[str, Sequence[str]]] = None,
+        port: Optional[int] = None,
+        *,
+        create_protocol: Optional[Callable[[Any], WebSocketServerProtocol]] = None,
+        logger: Optional[LoggerLike] = None,
+        compression: Optional[str] = "deflate",
+        origins: Optional[Sequence[Optional[Origin]]] = None,
+        extensions: Optional[Sequence[ServerExtensionFactory]] = None,
+        subprotocols: Optional[Sequence[Subprotocol]] = None,
+        extra_headers: Optional[HeadersLikeOrCallable] = None,
+        process_request: Optional[
+            Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]]
+        ] = None,
+        select_subprotocol: Optional[
+            Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol]
+        ] = None,
+        ping_interval: Optional[float] = 20,
+        ping_timeout: Optional[float] = 20,
+        close_timeout: Optional[float] = None,
+        max_size: Optional[int] = 2**20,
+        max_queue: Optional[int] = 2**5,
+        read_limit: int = 2**16,
+        write_limit: int = 2**16,
+        **kwargs: Any,
+    ) -> None:
+        # Backwards compatibility: close_timeout used to be called timeout.
+        timeout: Optional[float] = kwargs.pop("timeout", None)
+        if timeout is None:
+            timeout = 10
+        else:
+            warnings.warn("rename timeout to close_timeout", DeprecationWarning)
+        # If both are specified, timeout is ignored.
+        if close_timeout is None:
+            close_timeout = timeout
+
+        # Backwards compatibility: create_protocol used to be called klass.
+        klass: Optional[Type[WebSocketServerProtocol]] = kwargs.pop("klass", None)
+        if klass is None:
+            klass = WebSocketServerProtocol
+        else:
+            warnings.warn("rename klass to create_protocol", DeprecationWarning)
+        # If both are specified, klass is ignored.
+        if create_protocol is None:
+            create_protocol = klass
+
+        # Backwards compatibility: recv() used to return None on closed connections
+        legacy_recv: bool = kwargs.pop("legacy_recv", False)
+
+        # Backwards compatibility: the loop parameter used to be supported.
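As the docstring above notes, unrecognized keyword arguments flow through to loop.create_server(), so enabling TLS is a matter of passing ssl=. A sketch; the certificate paths and the echo handler are hypothetical:

import asyncio
import ssl

ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ssl_context.load_cert_chain("cert.pem", "key.pem")  # hypothetical paths

async def main():
    async with serve(echo, "localhost", 8765, ssl=ssl_context):
        await asyncio.Future()  # run until cancelled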
+ _loop: Optional[asyncio.AbstractEventLoop] = kwargs.pop("loop", None) + if _loop is None: + loop = asyncio.get_event_loop() + else: + loop = _loop + warnings.warn("remove loop argument", DeprecationWarning) + + ws_server = WebSocketServer(logger=logger) + + secure = kwargs.get("ssl") is not None + + if compression == "deflate": + extensions = enable_server_permessage_deflate(extensions) + elif compression is not None: + raise ValueError(f"unsupported compression: {compression}") + + if subprotocols is not None: + validate_subprotocols(subprotocols) + + factory = functools.partial( + create_protocol, + # For backwards compatibility with 10.0 or earlier. Done here in + # addition to WebSocketServerProtocol to trigger the deprecation + # warning once per serve() call rather than once per connection. + remove_path_argument(ws_handler), + ws_server, + host=host, + port=port, + secure=secure, + ping_interval=ping_interval, + ping_timeout=ping_timeout, + close_timeout=close_timeout, + max_size=max_size, + max_queue=max_queue, + read_limit=read_limit, + write_limit=write_limit, + loop=_loop, + legacy_recv=legacy_recv, + origins=origins, + extensions=extensions, + subprotocols=subprotocols, + extra_headers=extra_headers, + process_request=process_request, + select_subprotocol=select_subprotocol, + logger=logger, + ) + + if kwargs.pop("unix", False): + path: Optional[str] = kwargs.pop("path", None) + # unix_serve(path) must not specify host and port parameters. + assert host is None and port is None + create_server = functools.partial( + loop.create_unix_server, factory, path, **kwargs + ) + else: + create_server = functools.partial( + loop.create_server, factory, host, port, **kwargs + ) + + # This is a coroutine function. + self._create_server = create_server + self.ws_server = ws_server + + # async with serve(...) + + async def __aenter__(self) -> WebSocketServer: + return await self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + self.ws_server.close() + await self.ws_server.wait_closed() + + # await serve(...) + + def __await__(self) -> Generator[Any, None, WebSocketServer]: + # Create a suitable iterator by calling __await__ on a coroutine. + return self.__await_impl__().__await__() + + async def __await_impl__(self) -> WebSocketServer: + server = await self._create_server() + self.ws_server.wrap(server) + return self.ws_server + + # yield from serve(...) - remove when dropping Python < 3.10 + + __iter__ = __await__ + + +serve = Serve + + +def unix_serve( + ws_handler: Union[ + Callable[[WebSocketServerProtocol], Awaitable[Any]], + Callable[[WebSocketServerProtocol, str], Awaitable[Any]], # deprecated + ], + path: Optional[str] = None, + **kwargs: Any, +) -> Serve: + """ + Similar to :func:`serve`, but for listening on Unix sockets. + + This function builds upon the event + loop's :meth:`~asyncio.loop.create_unix_server` method. + + It is only available on Unix. + + It's useful for deploying a server behind a reverse proxy such as nginx. + + Args: + path: file system path to the Unix socket. 
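A minimal usage sketch for unix_serve(), assuming a hypothetical socket path and the echo handler from the earlier examples; a reverse proxy such as nginx would forward wss:// traffic to this socket:

import asyncio

async def main():
    async with unix_serve(echo, "/tmp/websockets.sock"):
        await asyncio.Future()  # run until cancelled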
+ + """ + return serve(ws_handler, path=path, unix=True, **kwargs) + + +def remove_path_argument( + ws_handler: Union[ + Callable[[WebSocketServerProtocol], Awaitable[Any]], + Callable[[WebSocketServerProtocol, str], Awaitable[Any]], + ] +) -> Callable[[WebSocketServerProtocol], Awaitable[Any]]: + try: + inspect.signature(ws_handler).bind(None) + except TypeError: + try: + inspect.signature(ws_handler).bind(None, "") + except TypeError: # pragma: no cover + # ws_handler accepts neither one nor two arguments; leave it alone. + pass + else: + # ws_handler accepts two arguments; activate backwards compatibility. + + # Enable deprecation warning and announce deprecation in 11.0. + # warnings.warn("remove second argument of ws_handler", DeprecationWarning) + + async def _ws_handler(websocket: WebSocketServerProtocol) -> Any: + return await cast( + Callable[[WebSocketServerProtocol, str], Awaitable[Any]], + ws_handler, + )(websocket, websocket.path) + + return _ws_handler + + return cast( + Callable[[WebSocketServerProtocol], Awaitable[Any]], + ws_handler, + ) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/py.typed b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/server.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/server.py new file mode 100644 index 00000000..5dad50b6 --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/server.py @@ -0,0 +1,521 @@ +from __future__ import annotations + +import base64 +import binascii +import email.utils +import http +from typing import Generator, List, Optional, Sequence, Tuple, cast + +from .connection import CONNECTING, OPEN, SERVER, Connection, State +from .datastructures import Headers, MultipleValuesError +from .exceptions import ( + InvalidHandshake, + InvalidHeader, + InvalidHeaderValue, + InvalidOrigin, + InvalidStatus, + InvalidUpgrade, + NegotiationError, +) +from .extensions import Extension, ServerExtensionFactory +from .headers import ( + build_extension, + parse_connection, + parse_extension, + parse_subprotocol, + parse_upgrade, +) +from .http import USER_AGENT +from .http11 import Request, Response +from .typing import ( + ConnectionOption, + ExtensionHeader, + LoggerLike, + Origin, + Subprotocol, + UpgradeProtocol, +) +from .utils import accept_key + + +# See #940 for why lazy_import isn't used here for backwards compatibility. +from .legacy.server import * # isort:skip # noqa + + +__all__ = ["ServerConnection"] + + +class ServerConnection(Connection): + """ + Sans-I/O implementation of a WebSocket server connection. + + Args: + origins: acceptable values of the ``Origin`` header; include + :obj:`None` in the list if the lack of an origin is acceptable. + This is useful for defending against Cross-Site WebSocket + Hijacking attacks. + extensions: list of supported extensions, in order in which they + should be tried. + subprotocols: list of supported subprotocols, in order of decreasing + preference. + state: initial state of the WebSocket connection. + max_size: maximum size of incoming messages in bytes; + :obj:`None` to disable the limit. + logger: logger for this connection; + defaults to ``logging.getLogger("websockets.client")``; + see the :doc:`logging guide <../topics/logging>` for details. 
+ + """ + + def __init__( + self, + origins: Optional[Sequence[Optional[Origin]]] = None, + extensions: Optional[Sequence[ServerExtensionFactory]] = None, + subprotocols: Optional[Sequence[Subprotocol]] = None, + state: State = CONNECTING, + max_size: Optional[int] = 2**20, + logger: Optional[LoggerLike] = None, + ): + super().__init__( + side=SERVER, + state=state, + max_size=max_size, + logger=logger, + ) + self.origins = origins + self.available_extensions = extensions + self.available_subprotocols = subprotocols + + def accept(self, request: Request) -> Response: + """ + Create a handshake response to accept the connection. + + If the connection cannot be established, the handshake response + actually rejects the handshake. + + You must send the handshake response with :meth:`send_response`. + + You can modify it before sending it, for example to add HTTP headers. + + Args: + request: WebSocket handshake request event received from the client. + + Returns: + Response: WebSocket handshake response event to send to the client. + + """ + try: + ( + accept_header, + extensions_header, + protocol_header, + ) = self.process_request(request) + except InvalidOrigin as exc: + request._exception = exc + self.handshake_exc = exc + if self.debug: + self.logger.debug("! invalid origin", exc_info=True) + return self.reject( + http.HTTPStatus.FORBIDDEN, + f"Failed to open a WebSocket connection: {exc}.\n", + ) + except InvalidUpgrade as exc: + request._exception = exc + self.handshake_exc = exc + if self.debug: + self.logger.debug("! invalid upgrade", exc_info=True) + response = self.reject( + http.HTTPStatus.UPGRADE_REQUIRED, + ( + f"Failed to open a WebSocket connection: {exc}.\n" + f"\n" + f"You cannot access a WebSocket server directly " + f"with a browser. You need a WebSocket client.\n" + ), + ) + response.headers["Upgrade"] = "websocket" + return response + except InvalidHandshake as exc: + request._exception = exc + self.handshake_exc = exc + if self.debug: + self.logger.debug("! invalid handshake", exc_info=True) + return self.reject( + http.HTTPStatus.BAD_REQUEST, + f"Failed to open a WebSocket connection: {exc}.\n", + ) + except Exception as exc: + request._exception = exc + self.handshake_exc = exc + self.logger.error("opening handshake failed", exc_info=True) + return self.reject( + http.HTTPStatus.INTERNAL_SERVER_ERROR, + ( + "Failed to open a WebSocket connection.\n" + "See server log for more information.\n" + ), + ) + + headers = Headers() + + headers["Date"] = email.utils.formatdate(usegmt=True) + + headers["Upgrade"] = "websocket" + headers["Connection"] = "Upgrade" + headers["Sec-WebSocket-Accept"] = accept_header + + if extensions_header is not None: + headers["Sec-WebSocket-Extensions"] = extensions_header + + if protocol_header is not None: + headers["Sec-WebSocket-Protocol"] = protocol_header + + headers["Server"] = USER_AGENT + + self.logger.info("connection open") + return Response(101, "Switching Protocols", headers) + + def process_request( + self, request: Request + ) -> Tuple[str, Optional[str], Optional[str]]: + """ + Check a handshake request and negotiate extensions and subprotocol. + + This function doesn't verify that the request is an HTTP/1.1 or higher + GET request and doesn't check the ``Host`` header. These controls are + usually performed earlier in the HTTP request handling code. They're + the responsibility of the caller. + + Args: + request: WebSocket handshake request received from the client. 
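Because accept() only builds a Response event, the caller owns the socket and drives all I/O. A schematic handshake loop under the Sans-I/O model, assuming conn is a ServerConnection, sock a connected TCP socket, and the byte-oriented Connection API (receive_data/receive_eof, events_received, data_to_send) from this package:

def do_handshake(conn, sock):
    while True:
        data = sock.recv(4096)
        if not data:
            conn.receive_eof()
            break
        conn.receive_data(data)
        events = conn.events_received()
        if events:
            response = conn.accept(events[0])  # events[0] is the Request
            conn.send_response(response)
            for payload in conn.data_to_send():
                sock.sendall(payload)
            break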
+ + Returns: + Tuple[str, Optional[str], Optional[str]]: + ``Sec-WebSocket-Accept``, ``Sec-WebSocket-Extensions``, and + ``Sec-WebSocket-Protocol`` headers for the handshake response. + + Raises: + InvalidHandshake: if the handshake request is invalid; + then the server must return 400 Bad Request error. + + """ + headers = request.headers + + connection: List[ConnectionOption] = sum( + [parse_connection(value) for value in headers.get_all("Connection")], [] + ) + + if not any(value.lower() == "upgrade" for value in connection): + raise InvalidUpgrade( + "Connection", ", ".join(connection) if connection else None + ) + + upgrade: List[UpgradeProtocol] = sum( + [parse_upgrade(value) for value in headers.get_all("Upgrade")], [] + ) + + # For compatibility with non-strict implementations, ignore case when + # checking the Upgrade header. The RFC always uses "websocket", except + # in section 11.2. (IANA registration) where it uses "WebSocket". + if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"): + raise InvalidUpgrade("Upgrade", ", ".join(upgrade) if upgrade else None) + + try: + key = headers["Sec-WebSocket-Key"] + except KeyError as exc: + raise InvalidHeader("Sec-WebSocket-Key") from exc + except MultipleValuesError as exc: + raise InvalidHeader( + "Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found" + ) from exc + + try: + raw_key = base64.b64decode(key.encode(), validate=True) + except binascii.Error as exc: + raise InvalidHeaderValue("Sec-WebSocket-Key", key) from exc + if len(raw_key) != 16: + raise InvalidHeaderValue("Sec-WebSocket-Key", key) + + try: + version = headers["Sec-WebSocket-Version"] + except KeyError as exc: + raise InvalidHeader("Sec-WebSocket-Version") from exc + except MultipleValuesError as exc: + raise InvalidHeader( + "Sec-WebSocket-Version", + "more than one Sec-WebSocket-Version header found", + ) from exc + + if version != "13": + raise InvalidHeaderValue("Sec-WebSocket-Version", version) + + accept_header = accept_key(key) + + self.origin = self.process_origin(headers) + + extensions_header, self.extensions = self.process_extensions(headers) + + protocol_header = self.subprotocol = self.process_subprotocol(headers) + + return ( + accept_header, + extensions_header, + protocol_header, + ) + + def process_origin(self, headers: Headers) -> Optional[Origin]: + """ + Handle the Origin HTTP request header. + + Args: + headers: WebSocket handshake request headers. + + Returns: + Optional[Origin]: origin, if it is acceptable. + + Raises: + InvalidOrigin: if the origin isn't acceptable. + + """ + # "The user agent MUST NOT include more than one Origin header field" + # per https://www.rfc-editor.org/rfc/rfc6454.html#section-7.3. + try: + origin = cast(Optional[Origin], headers.get("Origin")) + except MultipleValuesError as exc: + raise InvalidHeader("Origin", "more than one Origin header found") from exc + if self.origins is not None: + if origin not in self.origins: + raise InvalidOrigin(origin) + return origin + + def process_extensions( + self, + headers: Headers, + ) -> Tuple[Optional[str], List[Extension]]: + """ + Handle the Sec-WebSocket-Extensions HTTP request header. + + Accept or reject each extension proposed in the client request. + Negotiate parameters for accepted extensions. + + :rfc:`6455` leaves the rules up to the specification of each + :extension. + + To provide this level of flexibility, for each extension proposed by + the client, we check for a match with each extension available in the + server configuration. 
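For the built-in permessage-deflate extension, the server-side factory that participates in this negotiation is typically configured as in the following sketch; the reduced window size is an illustrative choice to cap per-connection memory:

from websockets.extensions.permessage_deflate import ServerPerMessageDeflateFactory

extensions = [ServerPerMessageDeflateFactory(server_max_window_bits=12)]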
If no match is found, the extension is ignored. + + If several variants of the same extension are proposed by the client, + it may be accepted several times, which won't make sense in general. + Extensions must implement their own requirements. For this purpose, + the list of previously accepted extensions is provided. + + This process doesn't allow the server to reorder extensions. It can + only select a subset of the extensions proposed by the client. + + Other requirements, for example related to mandatory extensions or the + order of extensions, may be implemented by overriding this method. + + Args: + headers: WebSocket handshake request headers. + + Returns: + Tuple[Optional[str], List[Extension]]: ``Sec-WebSocket-Extensions`` + HTTP response header and list of accepted extensions. + + Raises: + InvalidHandshake: to abort the handshake with an HTTP 400 error. + + """ + response_header_value: Optional[str] = None + + extension_headers: List[ExtensionHeader] = [] + accepted_extensions: List[Extension] = [] + + header_values = headers.get_all("Sec-WebSocket-Extensions") + + if header_values and self.available_extensions: + + parsed_header_values: List[ExtensionHeader] = sum( + [parse_extension(header_value) for header_value in header_values], [] + ) + + for name, request_params in parsed_header_values: + + for ext_factory in self.available_extensions: + + # Skip non-matching extensions based on their name. + if ext_factory.name != name: + continue + + # Skip non-matching extensions based on their params. + try: + response_params, extension = ext_factory.process_request_params( + request_params, accepted_extensions + ) + except NegotiationError: + continue + + # Add matching extension to the final list. + extension_headers.append((name, response_params)) + accepted_extensions.append(extension) + + # Break out of the loop once we have a match. + break + + # If we didn't break from the loop, no extension in our list + # matched what the client sent. The extension is declined. + + # Serialize extension header. + if extension_headers: + response_header_value = build_extension(extension_headers) + + return response_header_value, accepted_extensions + + def process_subprotocol(self, headers: Headers) -> Optional[Subprotocol]: + """ + Handle the Sec-WebSocket-Protocol HTTP request header. + + Args: + headers: WebSocket handshake request headers. + + Returns: + Optional[Subprotocol]: Subprotocol, if one was selected; this is + also the value of the ``Sec-WebSocket-Protocol`` response header. + + Raises: + InvalidHandshake: to abort the handshake with an HTTP 400 error. + + """ + subprotocol: Optional[Subprotocol] = None + + header_values = headers.get_all("Sec-WebSocket-Protocol") + + if header_values and self.available_subprotocols: + + parsed_header_values: List[Subprotocol] = sum( + [parse_subprotocol(header_value) for header_value in header_values], [] + ) + + subprotocol = self.select_subprotocol( + parsed_header_values, self.available_subprotocols + ) + + return subprotocol + + def select_subprotocol( + self, + client_subprotocols: Sequence[Subprotocol], + server_subprotocols: Sequence[Subprotocol], + ) -> Optional[Subprotocol]: + """ + Pick a subprotocol among those offered by the client. + + If several subprotocols are supported by the client and the server, + the default implementation selects the preferred subprotocols by + giving equal value to the priorities of the client and the server. 
+
+        If no common subprotocol is supported by the client and the server, it
+        proceeds without a subprotocol.
+
+        This is unlikely to be the most useful implementation in practice, as
+        many servers providing a subprotocol will require that the client uses
+        that subprotocol.
+
+        Args:
+            client_subprotocols: list of subprotocols offered by the client.
+            server_subprotocols: list of subprotocols available on the server.
+
+        Returns:
+            Optional[Subprotocol]: Subprotocol, if a common subprotocol was
+            found.
+
+        """
+        subprotocols = set(client_subprotocols) & set(server_subprotocols)
+        if not subprotocols:
+            return None
+        priority = lambda p: (
+            client_subprotocols.index(p) + server_subprotocols.index(p)
+        )
+        return sorted(subprotocols, key=priority)[0]
+
+    def reject(
+        self,
+        status: http.HTTPStatus,
+        text: str,
+    ) -> Response:
+        """
+        Create a handshake response to reject the connection.
+
+        A short plain text response is the best fallback when failing to
+        establish a WebSocket connection.
+
+        You must send the handshake response with :meth:`send_response`.
+
+        You can modify it before sending it, for example to alter HTTP headers.
+
+        Args:
+            status: HTTP status code.
+            text: HTTP response body; will be encoded to UTF-8.
+
+        Returns:
+            Response: WebSocket handshake response event to send to the client.
+
+        """
+        body = text.encode()
+        headers = Headers(
+            [
+                ("Date", email.utils.formatdate(usegmt=True)),
+                ("Connection", "close"),
+                ("Content-Length", str(len(body))),
+                ("Content-Type", "text/plain; charset=utf-8"),
+                ("Server", USER_AGENT),
+            ]
+        )
+        response = Response(status.value, status.phrase, headers, body)
+        # When reject() is called from accept(), handshake_exc is already set.
+        # If a user calls reject(), set handshake_exc to guarantee invariant:
+        # "handshake_exc is None if and only if opening handshake succeeded."
+        if self.handshake_exc is None:
+            self.handshake_exc = InvalidStatus(response)
+        self.logger.info("connection failed (%d %s)", status.value, status.phrase)
+        return response
+
+    def send_response(self, response: Response) -> None:
+        """
+        Send a handshake response to the client.
+
+        Args:
+            response: WebSocket handshake response event to send.
+ + """ + if self.debug: + code, phrase = response.status_code, response.reason_phrase + self.logger.debug("> HTTP/1.1 %d %s", code, phrase) + for key, value in response.headers.raw_items(): + self.logger.debug("> %s: %s", key, value) + if response.body is not None: + self.logger.debug("> [body] (%d bytes)", len(response.body)) + + self.writes.append(response.serialize()) + + if response.status_code == 101: + assert self.state is CONNECTING + self.state = OPEN + else: + self.send_eof() + self.parser = self.discard() + next(self.parser) # start coroutine + + def parse(self) -> Generator[None, None, None]: + if self.state is CONNECTING: + request = yield from Request.parse(self.reader.read_line) + + if self.debug: + self.logger.debug("< GET %s HTTP/1.1", request.path) + for key, value in request.headers.raw_items(): + self.logger.debug("< %s: %s", key, value) + + self.events.append(request) + + yield from super().parse() diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/speedups.cp37-win_amd64.pyd b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/speedups.cp37-win_amd64.pyd new file mode 100644 index 00000000..441b89e6 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/speedups.cp37-win_amd64.pyd differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/streams.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/streams.py new file mode 100644 index 00000000..f861d4bd --- /dev/null +++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/streams.py @@ -0,0 +1,151 @@ +from __future__ import annotations + +from typing import Generator + + +class StreamReader: + """ + Generator-based stream reader. + + This class doesn't support concurrent calls to :meth:`read_line`, + :meth:`read_exact`, or :meth:`read_to_eof`. Make sure calls are + serialized. + + """ + + def __init__(self) -> None: + self.buffer = bytearray() + self.eof = False + + def read_line(self, m: int) -> Generator[None, None, bytes]: + """ + Read a LF-terminated line from the stream. + + This is a generator-based coroutine. + + The return value includes the LF character. + + Args: + m: maximum number bytes to read; this is a security limit. + + Raises: + EOFError: if the stream ends without a LF. + RuntimeError: if the stream ends in more than ``m`` bytes. + + """ + n = 0 # number of bytes to read + p = 0 # number of bytes without a newline + while True: + n = self.buffer.find(b"\n", p) + 1 + if n > 0: + break + p = len(self.buffer) + if p > m: + raise RuntimeError(f"read {p} bytes, expected no more than {m} bytes") + if self.eof: + raise EOFError(f"stream ends after {p} bytes, before end of line") + yield + if n > m: + raise RuntimeError(f"read {n} bytes, expected no more than {m} bytes") + r = self.buffer[:n] + del self.buffer[:n] + return r + + def read_exact(self, n: int) -> Generator[None, None, bytes]: + """ + Read a given number of bytes from the stream. + + This is a generator-based coroutine. + + Args: + n: how many bytes to read. + + Raises: + EOFError: if the stream ends in less than ``n`` bytes. + + """ + assert n >= 0 + while len(self.buffer) < n: + if self.eof: + p = len(self.buffer) + raise EOFError(f"stream ends after {p} bytes, expected {n} bytes") + yield + r = self.buffer[:n] + del self.buffer[:n] + return r + + def read_to_eof(self, m: int) -> Generator[None, None, bytes]: + """ + Read all bytes from the stream. 
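The generator-based readers above yield whenever they need more data; the caller resumes the generator after feeding bytes, and the result arrives via StopIteration. A minimal driver sketch:

reader = StreamReader()
gen = reader.read_line(m=1024)

next(gen)                    # generator yields: it needs more data
reader.feed_data(b"hello\n")
try:
    next(gen)                # resumes, finds the LF, and returns the line
except StopIteration as exc:
    line = exc.value         # bytearray(b"hello\n"), including the LF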
+
+        This is a generator-based coroutine.
+
+        Args:
+            m: maximum number of bytes to read; this is a security limit.
+
+        Raises:
+            RuntimeError: if the stream ends in more than ``m`` bytes.
+
+        """
+        while not self.eof:
+            p = len(self.buffer)
+            if p > m:
+                raise RuntimeError(f"read {p} bytes, expected no more than {m} bytes")
+            yield
+        r = self.buffer[:]
+        del self.buffer[:]
+        return r
+
+    def at_eof(self) -> Generator[None, None, bool]:
+        """
+        Tell whether the stream has ended and all data was read.
+
+        This is a generator-based coroutine.
+
+        """
+        while True:
+            if self.buffer:
+                return False
+            if self.eof:
+                return True
+            # When all data was read but the stream hasn't ended, we can't
+            # tell whether it is at EOF until either feed_data() or
+            # feed_eof() is called.
+            yield
+
+    def feed_data(self, data: bytes) -> None:
+        """
+        Write data to the stream.
+
+        :meth:`feed_data` cannot be called after :meth:`feed_eof`.
+
+        Args:
+            data: data to write.
+
+        Raises:
+            EOFError: if the stream has ended.
+
+        """
+        if self.eof:
+            raise EOFError("stream ended")
+        self.buffer += data
+
+    def feed_eof(self) -> None:
+        """
+        End the stream.
+
+        :meth:`feed_eof` cannot be called more than once.
+
+        Raises:
+            EOFError: if the stream has ended.
+
+        """
+        if self.eof:
+            raise EOFError("stream ended")
+        self.eof = True
+
+    def discard(self) -> None:
+        """
+        Discard all buffered data, but don't end the stream.
+
+        """
+        del self.buffer[:]
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/typing.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/typing.py
new file mode 100644
index 00000000..e672ba00
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/typing.py
@@ -0,0 +1,60 @@
+from __future__ import annotations
+
+import logging
+from typing import List, NewType, Optional, Tuple, Union
+
+
+__all__ = [
+    "Data",
+    "LoggerLike",
+    "Origin",
+    "Subprotocol",
+    "ExtensionName",
+    "ExtensionParameter",
+]
+
+
+# Public types used in the signature of public APIs
+
+Data = Union[str, bytes]
+"""Types supported in a WebSocket message:
+:class:`str` for a Text_ frame, :class:`bytes` for a Binary_.
+
+.. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
+.. _Binary: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
+
+"""
+
+
+LoggerLike = Union[logging.Logger, logging.LoggerAdapter]
+"""Types accepted where a :class:`~logging.Logger` is expected."""
+
+
+Origin = NewType("Origin", str)
+"""Value of an ``Origin`` header."""
+
+
+Subprotocol = NewType("Subprotocol", str)
+"""Subprotocol in a ``Sec-WebSocket-Protocol`` header."""
+
+
+ExtensionName = NewType("ExtensionName", str)
+"""Name of a WebSocket extension."""
+
+
+ExtensionParameter = Tuple[str, Optional[str]]
+"""Parameter of a WebSocket extension."""
+
+
+# Private types
+
+ExtensionHeader = Tuple[ExtensionName, List[ExtensionParameter]]
+"""Extension in a ``Sec-WebSocket-Extensions`` header."""
+
+
+ConnectionOption = NewType("ConnectionOption", str)
+"""Connection option in a ``Connection`` header."""
+
+
+UpgradeProtocol = NewType("UpgradeProtocol", str)
+"""Upgrade protocol in an ``Upgrade`` header."""
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/uri.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/uri.py
new file mode 100644
index 00000000..fff0c380
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/uri.py
@@ -0,0 +1,108 @@
+from __future__ import annotations
+
+import dataclasses
+import urllib.parse
+from typing import Optional, Tuple
+
+from . import exceptions
+
+
+__all__ = ["parse_uri", "WebSocketURI"]
+
+
+@dataclasses.dataclass
+class WebSocketURI:
+    """
+    WebSocket URI.
+
+    Attributes:
+        secure: :obj:`True` for a ``wss`` URI, :obj:`False` for a ``ws`` URI.
+        host: Normalized to lower case.
+        port: Always set even if it's the default.
+        path: May be empty.
+        query: May be empty if the URI doesn't include a query component.
+        username: Available when the URI contains `User Information`_.
+        password: Available when the URI contains `User Information`_.
+
+    .. _User Information: https://www.rfc-editor.org/rfc/rfc3986.html#section-3.2.1
+
+    """
+
+    secure: bool
+    host: str
+    port: int
+    path: str
+    query: str
+    username: Optional[str]
+    password: Optional[str]
+
+    @property
+    def resource_name(self) -> str:
+        if self.path:
+            resource_name = self.path
+        else:
+            resource_name = "/"
+        if self.query:
+            resource_name += "?" + self.query
+        return resource_name
+
+    @property
+    def user_info(self) -> Optional[Tuple[str, str]]:
+        if self.username is None:
+            return None
+        assert self.password is not None
+        return (self.username, self.password)
+
+
+# All characters from the gen-delims and sub-delims sets in RFC 3987.
+DELIMS = ":/?#[]@!$&'()*+,;="
+
+
+def parse_uri(uri: str) -> WebSocketURI:
+    """
+    Parse and validate a WebSocket URI.
+
+    Args:
+        uri: WebSocket URI.
+
+    Returns:
+        WebSocketURI: Parsed WebSocket URI.
+
+    Raises:
+        InvalidURI: if ``uri`` isn't a valid WebSocket URI.
+
+    """
+    parsed = urllib.parse.urlparse(uri)
+    if parsed.scheme not in ["ws", "wss"]:
+        raise exceptions.InvalidURI(uri, "scheme isn't ws or wss")
+    if parsed.hostname is None:
+        raise exceptions.InvalidURI(uri, "hostname isn't provided")
+    if parsed.fragment != "":
+        raise exceptions.InvalidURI(uri, "fragment identifier is meaningless")
+
+    secure = parsed.scheme == "wss"
+    host = parsed.hostname
+    port = parsed.port or (443 if secure else 80)
+    path = parsed.path
+    query = parsed.query
+    username = parsed.username
+    password = parsed.password
+    # urllib.parse.urlparse accepts URLs with a username but without a
+    # password. This doesn't make sense for HTTP Basic Auth credentials.
+    if username is not None and password is None:
+        raise exceptions.InvalidURI(uri, "username provided without password")
+
+    try:
+        uri.encode("ascii")
+    except UnicodeEncodeError:
+        # Input contains non-ASCII characters.
+        # It must be an IRI. Convert it to a URI.
+        host = host.encode("idna").decode()
+        path = urllib.parse.quote(path, safe=DELIMS)
+        query = urllib.parse.quote(query, safe=DELIMS)
+        if username is not None:
+            assert password is not None
+            username = urllib.parse.quote(username, safe=DELIMS)
+            password = urllib.parse.quote(password, safe=DELIMS)
+
+    return WebSocketURI(secure, host, port, path, query, username, password)
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/utils.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/utils.py
new file mode 100644
index 00000000..c4040490
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/utils.py
@@ -0,0 +1,51 @@
+from __future__ import annotations
+
+import base64
+import hashlib
+import secrets
+import sys
+
+
+__all__ = ["accept_key", "apply_mask"]
+
+
+GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
+
+
+def generate_key() -> str:
+    """
+    Generate a random key for the Sec-WebSocket-Key header.
+
+    """
+    key = secrets.token_bytes(16)
+    return base64.b64encode(key).decode()
+
+
+def accept_key(key: str) -> str:
+    """
+    Compute the value of the Sec-WebSocket-Accept header.
+
+    Args:
+        key: value of the Sec-WebSocket-Key header.
+
+    """
+    sha1 = hashlib.sha1((key + GUID).encode()).digest()
+    return base64.b64encode(sha1).decode()
+
+
+def apply_mask(data: bytes, mask: bytes) -> bytes:
+    """
+    Apply masking to the data of a WebSocket message.
+
+    Args:
+        data: data to mask.
+        mask: 4-byte mask.
+
+    """
+    if len(mask) != 4:
+        raise ValueError("mask must contain 4 bytes")
+
+    data_int = int.from_bytes(data, sys.byteorder)
+    mask_repeated = mask * (len(data) // 4) + mask[: len(data) % 4]
+    mask_int = int.from_bytes(mask_repeated, sys.byteorder)
+    return (data_int ^ mask_int).to_bytes(len(data), sys.byteorder)
diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/version.py b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/version.py
new file mode 100644
index 00000000..c30bfd68
--- /dev/null
+++ b/Resources/WPy64-3720/python-3.7.2.amd64/Lib/site-packages/websockets/version.py
@@ -0,0 +1,78 @@
+from __future__ import annotations
+
+
+__all__ = ["tag", "version", "commit"]
+
+
+# ========= =========== ===================
+#           release     development
+# ========= =========== ===================
+# tag       X.Y         X.Y (upcoming)
+# version   X.Y         X.Y.dev1+g5678cde
+# commit    X.Y         5678cde
+# ========= =========== ===================
+
+
+# When tagging a release, set `released = True`.
+# After tagging a release, set `released = False` and increment `tag`.
+
+released = True
+
+tag = version = commit = "10.3"
+
+
+if not released:  # pragma: no cover
+    import pathlib
+    import re
+    import subprocess
+
+    def get_version(tag: str) -> str:
+        # Since setup.py executes the contents of src/websockets/version.py,
+        # __file__ can point to either of these two files.
+        file_path = pathlib.Path(__file__)
+        root_dir = file_path.parents[0 if file_path.name == "setup.py" else 2]
+
+        # Read version from git if available. This prevents reading stale
+        # information from src/websockets.egg-info after building a sdist.
+ try: + description = subprocess.run( + ["git", "describe", "--dirty", "--tags", "--long"], + capture_output=True, + cwd=root_dir, + timeout=1, + check=True, + text=True, + ).stdout.strip() + # subprocess.run raises FileNotFoundError if git isn't on $PATH. + except (FileNotFoundError, subprocess.CalledProcessError): + pass + else: + description_re = r"[0-9.]+-([0-9]+)-(g[0-9a-f]{7,}(?:-dirty)?)" + match = re.fullmatch(description_re, description) + assert match is not None + distance, remainder = match.groups() + remainder = remainder.replace("-", ".") # required by PEP 440 + return f"{tag}.dev{distance}+{remainder}" + + # Read version from package metadata if it is installed. + try: + import importlib.metadata # move up when dropping Python 3.7 + + return importlib.metadata.version("websockets") + except ImportError: + pass + + # Avoid crashing if the development version cannot be determined. + return f"{tag}.dev0+gunknown" + + version = get_version(tag) + + def get_commit(tag: str, version: str) -> str: + # Extract commit from version, falling back to tag if not available. + version_re = r"[0-9.]+\.dev[0-9]+\+g([0-9a-f]{7,}|unknown)(?:\.dirty)?" + match = re.fullmatch(version_re, version) + assert match is not None + (commit,) = match.groups() + return tag if commit == "unknown" else commit + + commit = get_commit(tag, version) diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/dotenv.exe b/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/dotenv.exe new file mode 100644 index 00000000..6479de88 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/dotenv.exe differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/uvicorn.exe b/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/uvicorn.exe new file mode 100644 index 00000000..44e93d24 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/uvicorn.exe differ diff --git a/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/watchfiles.exe b/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/watchfiles.exe new file mode 100644 index 00000000..3f6a4a01 Binary files /dev/null and b/Resources/WPy64-3720/python-3.7.2.amd64/Scripts/watchfiles.exe differ diff --git a/Sources/pyOpenRPA/Orchestrator/Server.py b/Sources/pyOpenRPA/Orchestrator/Server.py index d702c4bb..0349fcee 100755 --- a/Sources/pyOpenRPA/Orchestrator/Server.py +++ b/Sources/pyOpenRPA/Orchestrator/Server.py @@ -6,6 +6,7 @@ # lResponseDict = {"Headers": {}, "SetCookies": {}, "Body": b"", "StatusCode": None} # self.OpenRPAResponseDict = lResponseDict +#from http.client import HTTPException from http.server import BaseHTTPRequestHandler, HTTPServer from socketserver import ThreadingMixIn import threading @@ -72,78 +73,10 @@ def __ComplexDictMerge2to1Overwrite__(in1Dict, in2Dict): in1Dict[lKeyStr] = in2Dict[lKeyStr] return in1Dict -#Authenticate function () -# return dict -# { -# "Domain": "", #Empty if Auth is not success -# "User": "" #Empty if Auth is not success -# } -def AuthenticateVerify(inRequest): - lResult={"Domain": "", "User": ""} - ###################################### - #Way 1 - try to find AuthToken - lCookies = cookies.SimpleCookie(inRequest.headers.get("Cookie", "")) - global gSettingsDict - #pdb.set_trace() - if "AuthToken" in lCookies: - lCookieAuthToken = lCookies.get("AuthToken", "").value - if lCookieAuthToken: - #Find AuthToken in GlobalDict - if lCookieAuthToken in gSettingsDict.get("ServerDict", {}).get("AccessUsers", {}).get("AuthTokensDict", {}): - #Auth Token Has Been Founded - lResult["Domain"] = 
gSettingsDict["ServerDict"]["AccessUsers"]["AuthTokensDict"][lCookieAuthToken]["Domain"]
-                    lResult["User"] = gSettingsDict["ServerDict"]["AccessUsers"]["AuthTokensDict"][lCookieAuthToken]["User"]
-                    #Set auth token
-                    inRequest.OpenRPA["AuthToken"] = lCookieAuthToken
-                    inRequest.OpenRPA["Domain"] = lResult["Domain"]
-                    inRequest.OpenRPA["User"] = lResult["User"]
-                    #Exit earlier
-                    return lResult
-    ######################################
-    #Way 2 - try to logon
-    lHeaderAuthorization = inRequest.headers.get("Authorization", "").split(" ")
-    if len(lHeaderAuthorization) == 2:
-        llHeaderAuthorizationDecodedUserPasswordList = base64.b64decode(lHeaderAuthorization[1]).decode("utf-8").split(
-            ":")
-        lUser = llHeaderAuthorizationDecodedUserPasswordList[0]
-        lPassword = llHeaderAuthorizationDecodedUserPasswordList[1]
-        lDomain = ""
-        if "\\" in lUser:
-            lDomain = lUser.split("\\")[0]
-            lUser = lUser.split("\\")[1]
-        lLogonBool = __Orchestrator__.OSCredentialsVerify(inUserStr=lUser, inPasswordStr=lPassword, inDomainStr=lDomain)
-        #Check result
-        if lLogonBool:
-            lResult["Domain"] = lDomain
-            lResult["User"] = lUser
-            #Create token
-            lAuthToken=str(uuid.uuid1())
-            gSettingsDict["ServerDict"]["AccessUsers"]["AuthTokensDict"][lAuthToken] = {}
-            gSettingsDict["ServerDict"]["AccessUsers"]["AuthTokensDict"][lAuthToken]["Domain"] = lResult["Domain"]
-            gSettingsDict["ServerDict"]["AccessUsers"]["AuthTokensDict"][lAuthToken]["User"] = lResult["User"]
-            gSettingsDict["ServerDict"]["AccessUsers"]["AuthTokensDict"][lAuthToken]["FlagDoNotExpire"] = False
-            gSettingsDict["ServerDict"]["AccessUsers"]["AuthTokensDict"][lAuthToken]["TokenDatetime"] = datetime.datetime.now()
-            #Set-cookie
-            inRequest.OpenRPA["AuthToken"] = lAuthToken
-            inRequest.OpenRPA["Domain"] = lResult["Domain"]
-            inRequest.OpenRPA["User"] = lResult["User"]
-            inRequest.OpenRPASetCookie = {}
-            #New engine of server
-            inRequest.OpenRPAResponseDict["SetCookies"]["AuthToken"] = lAuthToken
-            #inRequest.OpenRPAResponse["Set-Cookie"]=[]lResult["Set-Cookie"] = lAuthToken
-    #pdb.set_trace()
-    #inRequest.send_header("Set-Cookie:", f"AuthToken={lAuthToken}")
-    ######################################
-    return lResult
+
 def AuthenticateBlock(inRequest):
-    # Send response status code
-    inRequest.send_response(401)
-    # Send headers
-    inRequest.send_header('Content-type', 'text/html')
-    inRequest.send_header('WWW-Authenticate', 'Basic')  # Always ask login pass
-    inRequest.end_headers()
-    # Write content as utf-8 data
-    inRequest.wfile.write(bytes("", "utf8"))
+    raise HTTPException(status_code=401, detail="Authentication required", headers={'Content-type':'text/html', 'WWW-Authenticate':'Basic'})
+
 #Check access before execute the action
 #return bool True - go execute, False - dont execute
 def UserAccessCheckBefore(inMethod, inRequest):
@@ -236,8 +169,8 @@ def UserAccessCheckBefore(inMethod, inRequest):
     #####################################
     #Return lResult
     return lResult
-# HTTPRequestHandler class
-class testHTTPServer_RequestHandler(BaseHTTPRequestHandler):
+
+class HTTPRequestOld():
     # Def to check User Role access grants
     def UACClientCheck(self, inRoleKeyList): # Alias
        return self.UserRoleAccessAsk(inRoleKeyList=inRoleKeyList)
@@ -418,7 +351,7 @@ class testHTTPServer_RequestHandler(BaseHTTPRequestHandler):
         # Send headers
         for lItemKey, lItemValue in inResponseDict["Headers"].items():
             self.send_header(lItemKey, lItemValue)
-        # Send headers: Set-Cookie
+        # Send headers: Set-Cookie
        for lItemKey, lItemValue in inResponseDict["SetCookies"].items():
self.send_header("Set-Cookie", f"{lItemKey}={lItemValue}") #Close headers section in response @@ -444,75 +377,49 @@ class testHTTPServer_RequestHandler(BaseHTTPRequestHandler): # Prepare result dict lResponseDict = {"Headers": {}, "SetCookies": {}, "Body": b"", "StatusCode": None} self.OpenRPAResponseDict = lResponseDict - ############################ - #First - all with Flag UACBool - ############################ - for lURLItem in gSettingsDict["ServerDict"]["URLList"]: - #Check if all condition are applied - lFlagURLIsApplied=False - lFlagURLIsApplied=self.URLItemCheckDo(inURLItem=lURLItem, inMethod="GET", inOnlyFlagUACBool=True) - if lFlagURLIsApplied: - self.ResponseDictSend() - return - ##################################### - #Do authentication - #Check if authentication is turned on - ##################################### - lFlagAccessUserBlock=False - lAuthenticateDict = {"Domain": "", "User": ""} + #Check the user access (if flag, UAC) + #################################### + lFlagUserAccess = True + #If need user authentication if gSettingsDict.get("ServerDict", {}).get("AccessUsers", {}).get("FlagCredentialsAsk", False): - lAuthenticateDict = AuthenticateVerify(self) - if not lAuthenticateDict["User"]: - lFlagAccessUserBlock=True - # Logging - # gSettingsDict["Logger"].info(f"HTTP request /. Domain: {lAuthenticateDict['Domain']}, User: {lAuthenticateDict['User']}") - if lFlagAccessUserBlock: - AuthenticateBlock(self) - ##################################### - else: - #Check the user access (if flag) - #################################### - lFlagUserAccess = True - #If need user authentication - if gSettingsDict.get("ServerDict", {}).get("AccessUsers", {}).get("FlagCredentialsAsk", False): - lFlagUserAccess = UserAccessCheckBefore("GET", self) - ###################################### - if lFlagUserAccess: - if CrossOS.IS_WINDOWS_BOOL: lOrchestratorFolder = "\\".join(__file__.split("\\")[:-1]) - if CrossOS.IS_LINUX_BOOL: lOrchestratorFolder = "/".join(__file__.split("/")[:-1]) - ############################ - #New server engine (url from global dict (URLList)) - ############################ - for lURLItem in gSettingsDict["ServerDict"]["URLList"]: - #Check if all condition are applied - lFlagURLIsApplied=False - lFlagURLIsApplied=self.URLItemCheckDo(lURLItem, "GET") - if lFlagURLIsApplied: - self.ResponseDictSend() - return - #Monitor - if self.path == '/Monitor/JSONDaemonListGet': - # Send response status code - self.send_response(200) - # Send headers - self.send_header('Content-type','application/json') - self.end_headers() - # Send message back to client - message = json.dumps(gSettingsDict) - # Write content as utf-8 data - self.wfile.write(bytes(message, "utf8")) - #Filemanager function - if self.path.lower().startswith('/filemanager/'): - lFileURL=self.path[13:] - # check if file in FileURL - File Path Mapping Dict - if lFileURL.lower() in gSettingsDict["FileManager"]["FileURLFilePathDict"]: - self.SendResponseContentTypeFile('application/octet-stream', gSettingsDict["FileManager"]["FileURLFilePathDict"][lFileURL]) - else: - #Set access denied code + lFlagUserAccess = UserAccessCheckBefore("GET", self) + ###################################### + if lFlagUserAccess: + if CrossOS.IS_WINDOWS_BOOL: lOrchestratorFolder = "\\".join(__file__.split("\\")[:-1]) + if CrossOS.IS_LINUX_BOOL: lOrchestratorFolder = "/".join(__file__.split("/")[:-1]) + ############################ + #New server engine (url from global dict (URLList)) + ############################ + for lURLItem in 
gSettingsDict["ServerDict"]["URLList"]: + #Check if all condition are applied + lFlagURLIsApplied=False + lFlagURLIsApplied=self.URLItemCheckDo(lURLItem, "GET") + if lFlagURLIsApplied: + self.ResponseDictSend() + return + #Monitor + if self.path == '/Monitor/JSONDaemonListGet': # Send response status code - self.send_response(403) + self.send_response(200) # Send headers + self.send_header('Content-type','application/json') self.end_headers() + # Send message back to client + message = json.dumps(gSettingsDict) + # Write content as utf-8 data + self.wfile.write(bytes(message, "utf8")) + #Filemanager function + if self.path.lower().startswith('/filemanager/'): + lFileURL=self.path[13:] + # check if file in FileURL - File Path Mapping Dict + if lFileURL.lower() in gSettingsDict["FileManager"]["FileURLFilePathDict"]: + self.SendResponseContentTypeFile('application/octet-stream', gSettingsDict["FileManager"]["FileURLFilePathDict"][lFileURL]) + else: + #Set access denied code + # Send response status code + self.send_response(403) + # Send headers + self.end_headers() except BrokenPipeError as e: lL = gSettingsDict["Logger"] if lL: lL.warning(f"Сервер (do_GET): Возникла ошибка сети - BrokenPipeError: [Errno 32] Broken pipe. Сервер продолжает работу") @@ -537,154 +444,188 @@ class testHTTPServer_RequestHandler(BaseHTTPRequestHandler): #pdb.set_trace() lResponseDict = {"Headers": {}, "SetCookies":{}, "Body": b"", "StatusCode": None} self.OpenRPAResponseDict = lResponseDict - ############################ - #First - all with Flag UACBool - ############################ - for lURLItem in gSettingsDict["ServerDict"]["URLList"]: - #Check if all condition are applied - lFlagURLIsApplied=False - lFlagURLIsApplied=self.URLItemCheckDo(inURLItem=lURLItem, inMethod="POST", inOnlyFlagUACBool=True) - if lFlagURLIsApplied: - self.ResponseDictSend() - return - ##################################### - #Do authentication - #Check if authentication is turned on - ##################################### - lFlagAccessUserBlock=False - lAuthenticateDict = {"Domain": "", "User": ""} - lIsSuperToken = False # Is supertoken + #Check the user access (if flag) + #################################### + lFlagUserAccess = True + #If need user authentication if gSettingsDict.get("ServerDict", {}).get("AccessUsers", {}).get("FlagCredentialsAsk", False): - lAuthenticateDict = AuthenticateVerify(self) - # Get Flag is supertoken (True|False) - lIsSuperToken = gSettingsDict.get("ServerDict", {}).get("AccessUsers", {}).get("AuthTokensDict", {}).get(self.OpenRPA["AuthToken"], {}).get("FlagDoNotExpire", False) - if not lAuthenticateDict["User"]: - lFlagAccessUserBlock=True - if lFlagAccessUserBlock: - AuthenticateBlock(self) - ##################################### - else: - #Check the user access (if flag) - #################################### - lFlagUserAccess = True - #If need user authentication - if gSettingsDict.get("ServerDict", {}).get("AccessUsers", {}).get("FlagCredentialsAsk", False): - lFlagUserAccess = UserAccessCheckBefore("POST", self) - ###################################### - if lFlagUserAccess: - lOrchestratorFolder = "\\".join(__file__.split("\\")[:-1]) - ############################ - #New server engine (url from global dict (URLList)) - ############################ - for lURLItem in gSettingsDict["ServerDict"]["URLList"]: - #Check if all condition are applied - lFlagURLIsApplied=False - lFlagURLIsApplied=self.URLItemCheckDo(lURLItem, "POST") - if lFlagURLIsApplied: - self.ResponseDictSend() - return - 
#Централизованная функция получения запросов/отправки - if self.path == '/Utils/Processor': - #ReadRequest - lInputObject={} - if self.headers.get('Content-Length') is not None: - lInputByteArrayLength = int(self.headers.get('Content-Length')) - lInputByteArray=self.rfile.read(lInputByteArrayLength) - #Превращение массива байт в объект - lInputObject=json.loads(lInputByteArray.decode('utf8')) - # Send response status code - self.send_response(200) - # Send headers - self.send_header('Content-type','application/json') - self.end_headers() - # Logging info about processor activity if not SuperToken () - if not lIsSuperToken: - lActivityTypeListStr = "" - try: - if type(lInputObject) is list: - for lActivityItem in lInputObject: - lActivityTypeListStr+=f"{lActivityItem['Type']}; " - else: - lActivityTypeListStr += f"{lInputObject['Type']}" - except Exception as e: - lActivityTypeListStr = "Has some error with Activity Type read" - if lL: lL.info(f"Сервер:: !ВНИМАНИЕ! /Utils/Processor через некоторое время перестанет поддерживаться. Используйте /pyOpenRPA/Processor или /pyOpenRPA/ActivityListExecute. Активность поступила от пользователя. Домен: {self.OpenRPA['Domain']}, Логин: {self.OpenRPA['User']}, Тип активности: {lActivityTypeListStr}") - # Send message back to client - message = json.dumps(ProcessorOld.ActivityListOrDict(lInputObject)) - # Write content as utf-8 data - self.wfile.write(bytes(message, "utf8")) - return - else: - #Set access denied code + lFlagUserAccess = UserAccessCheckBefore("POST", self) + ###################################### + if lFlagUserAccess: + lOrchestratorFolder = "\\".join(__file__.split("\\")[:-1]) + ############################ + #New server engine (url from global dict (URLList)) + ############################ + for lURLItem in gSettingsDict["ServerDict"]["URLList"]: + #Check if all condition are applied + lFlagURLIsApplied=False + lFlagURLIsApplied=self.URLItemCheckDo(lURLItem, "POST") + if lFlagURLIsApplied: + self.ResponseDictSend() + return + #Централизованная функция получения запросов/отправки + if self.path == '/Utils/Processor': + #ReadRequest + lInputObject={} + if self.headers.get('Content-Length') is not None: + lInputByteArrayLength = int(self.headers.get('Content-Length')) + lInputByteArray=self.rfile.read(lInputByteArrayLength) + #Превращение массива байт в объект + lInputObject=json.loads(lInputByteArray.decode('utf8')) # Send response status code - self.send_response(403) + self.send_response(200) # Send headers + self.send_header('Content-type','application/json') self.end_headers() - return + # Logging info about processor activity if not SuperToken () + if not lIsSuperToken: + lActivityTypeListStr = "" + try: + if type(lInputObject) is list: + for lActivityItem in lInputObject: + lActivityTypeListStr+=f"{lActivityItem['Type']}; " + else: + lActivityTypeListStr += f"{lInputObject['Type']}" + except Exception as e: + lActivityTypeListStr = "Has some error with Activity Type read" + if lL: lL.info(f"Сервер:: !ВНИМАНИЕ! /Utils/Processor через некоторое время перестанет поддерживаться. Используйте /pyOpenRPA/Processor или /pyOpenRPA/ActivityListExecute. Активность поступила от пользователя. 
Домен: {self.OpenRPA['Domain']}, Логин: {self.OpenRPA['User']}, Тип активности: {lActivityTypeListStr}") + # Send message back to client + message = json.dumps(ProcessorOld.ActivityListOrDict(lInputObject)) + # Write content as utf-8 data + self.wfile.write(bytes(message, "utf8")) + return + else: + # Send response status code + self.send_response(403) + # Send headers + self.end_headers() + return except BrokenPipeError as e: lL = gSettingsDict["Logger"] - if lL: lL.warning(f"Сервер (do_POST): Возникла ошибка сети - BrokenPipeError: [Errno 32] Broken pipe. Сервер продолжает работу") + if lL: lL.warning(f"Сервер, обратная совместимость (do_POST): Возникла ошибка сети - BrokenPipeError: [Errno 32] Broken pipe. Сервер продолжает работу") except Exception as e: lL = gSettingsDict["Logger"] - if lL: lL.exception(f"Сервер (do_POST): Неопознанная ошибка сети - см. текст ошибки. Сервер продолжает работу") - #Logging - #!Turn it on to stop print in console - #def log_message(self, format, *args): - # return + if lL: lL.exception(f"Сервер, обратная совместимость (do_POST): Неопознанная ошибка сети - см. текст ошибки. Сервер продолжает работу") -class ThreadedHTTPServer(ThreadingMixIn, HTTPServer): - daemon_threads = True - """Handle requests in a separate thread.""" - def finish_request(self, request, client_address): - try: - global gSettingsDict - request.settimeout(gSettingsDict["ServerDict"]["RequestTimeoutSecFloat"]) - # "super" can not be used because BaseServer is not created from object - HTTPServer.finish_request(self, request, client_address) - except ConnectionResetError as e: - lL = gSettingsDict["Logger"] - if lL: lL.warning(f"Сервер (finish_request): Возникла ошибка сети - ConnectionResetError: [Errno 104] Connection reset by peer. Сервер продолжает работу") - except Exception as e: - lL = gSettingsDict["Logger"] - if lL: lL.exception(f"Сервер (finish_request): Неопознанная ошибка сети - см. текст ошибки. 
Сервер продолжает работу")
-
-#inGlobalDict
-# "JSONConfigurationDict":
-import ssl
-class RobotDaemonServer(Thread):
-    def __init__(self,name,inGlobalDict):
-        Thread.__init__(self)
-        self.name = name
-        # Update the global dict
-        ServerSettings.SettingsUpdate(inGlobalDict)
-    def run(self):
-        global gSettingsDict
-        lL = gSettingsDict.get("Logger",None)
-        try:
-            lServerDict = gSettingsDict["ServerDict"]["ListenDict"][self.name]
-            lAddressStr=lServerDict["AddressStr"]
-            lPortInt=lServerDict["PortInt"]
-            lCertFilePathStr = lServerDict["CertFilePEMPathStr"]
-            lKeyFilePathStr = lServerDict["KeyFilePathStr"]
-            if lCertFilePathStr == "": lCertFilePathStr = None
-            if lKeyFilePathStr == "": lKeyFilePathStr = None
-            # Server settings
-            # Choose port 8080, for port 80, which is normally used for a http server, you need root access
-            server_address = (lAddressStr, lPortInt)
-            #httpd = HTTPServer(server_address, testHTTPServer_RequestHandler)
-            #httpd.serve_forever()
-            httpd = ThreadedHTTPServer(server_address, testHTTPServer_RequestHandler)
-            if lCertFilePathStr is not None:
-                if lKeyFilePathStr is not None:
-                    httpd.socket = ssl.wrap_socket(httpd.socket, server_side=True, certfile=lCertFilePathStr, keyfile=lKeyFilePathStr)
-                else:
-                    httpd.socket = ssl.wrap_socket(httpd.socket, server_side=True, certfile=lCertFilePathStr)
-                if lL: lL.info(f"Сервер инициализирован успешно (с поддержкой SSL):: Наименование: {self.name}, Слушает URL: {lAddressStr}, Слушает порт: {lPortInt}, Путь к файлу сертификата (.pem): {lCertFilePathStr}")
-            else:
-                if lL: lL.info(f"Сервер инициализирован успешно (без поддержки SSL):: Наименование: {self.name}, Слушает URL: {lAddressStr}, Слушает порт: {lPortInt}")
-            #print('Starting server, use to stop')
-            httpd.serve_forever()
-        except Exception as e:
-            if lL: lL.exception(f"Сервер:: Ошибка при инициализации - обратитесь в тех. поддержку pyOpenRPA")
+from typing import Union
+# import declarations
+from fastapi import FastAPI, Form, Request, HTTPException, Depends, Header, Response
+from fastapi.responses import PlainTextResponse, HTMLResponse, FileResponse
+from fastapi.staticfiles import StaticFiles
+from fastapi.templating import Jinja2Templates
+
+from pydantic import BaseModel
+import random
+import uvicorn
+
+
+# initialization
+app = FastAPI()
+
+# hello world, GET method, returns a string
+#@app.get("/", response_class=PlainTextResponse)
+#async def hello():
+#    return "Hello World!"
+
+
+def IdentifyAuthorize(inRequest:Request, inResponse:Response,
+        inCookiesStr: Union[str, None] = Header(default=None,alias="Cookie"),
+        inAuthorizationStr: Union[str, None] = Header(default="",alias="Authorization")):
+    lResult={"Domain": "", "User": ""}
+    ######################################
+    #Way 1 - try to find AuthToken
+    lCookies = cookies.SimpleCookie(inCookiesStr) # inRequest.headers.get("Cookie", "")
+    global gSettingsDict
+    lHeaderAuthorization = inAuthorizationStr.split(" ")
+    if "AuthToken" in lCookies:
+        lCookieAuthToken = lCookies.get("AuthToken", "").value
+        if lCookieAuthToken:
+            #Find AuthToken in GlobalDict
+            if lCookieAuthToken in gSettingsDict.get("ServerDict", {}).get("AccessUsers", {}).get("AuthTokensDict", {}):
+                #Auth Token Has Been Founded
+                lResult["Domain"] = gSettingsDict["ServerDict"]["AccessUsers"]["AuthTokensDict"][lCookieAuthToken]["Domain"]
+                lResult["User"] = gSettingsDict["ServerDict"]["AccessUsers"]["AuthTokensDict"][lCookieAuthToken]["User"]
+                #Set auth token
+                inRequest.OpenRPA={}
+                inRequest.OpenRPA["AuthToken"] = lCookieAuthToken
+                inRequest.OpenRPA["Domain"] = lResult["Domain"]
+                inRequest.OpenRPA["User"] = lResult["User"]
+                #Exit earlier
+                return lResult
+    ######################################
+    #Way 2 - try to logon
+    elif len(lHeaderAuthorization) == 2:
+        llHeaderAuthorizationDecodedUserPasswordList = base64.b64decode(lHeaderAuthorization[1]).decode("utf-8").split(
+            ":")
+        lUser = llHeaderAuthorizationDecodedUserPasswordList[0]
+        lPassword = llHeaderAuthorizationDecodedUserPasswordList[1]
+        lDomain = ""
+        if "\\" in lUser:
+            lDomain = lUser.split("\\")[0]
+            lUser = lUser.split("\\")[1]
+        lLogonBool = __Orchestrator__.OSCredentialsVerify(inUserStr=lUser, inPasswordStr=lPassword, inDomainStr=lDomain)
+        #Check result
+        if lLogonBool:
+            lResult["Domain"] = lDomain
+            lResult["User"] = lUser
+            #Create token
+            lAuthToken=str(uuid.uuid1())
+            gSettingsDict["ServerDict"]["AccessUsers"]["AuthTokensDict"][lAuthToken] = {}
+            gSettingsDict["ServerDict"]["AccessUsers"]["AuthTokensDict"][lAuthToken]["Domain"] = lResult["Domain"]
+            gSettingsDict["ServerDict"]["AccessUsers"]["AuthTokensDict"][lAuthToken]["User"] = lResult["User"]
+            gSettingsDict["ServerDict"]["AccessUsers"]["AuthTokensDict"][lAuthToken]["FlagDoNotExpire"] = False
+            gSettingsDict["ServerDict"]["AccessUsers"]["AuthTokensDict"][lAuthToken]["TokenDatetime"] = datetime.datetime.now()
+            #Set-cookie
+            inResponse.set_cookie(key="AuthToken",value=lAuthToken)
+            inRequest.OpenRPA={}
+            inRequest.OpenRPA["AuthToken"] = lAuthToken
+            inRequest.OpenRPA["Domain"] = lResult["Domain"]
+            inRequest.OpenRPA["User"] = lResult["User"]
+            #inRequest.OpenRPASetCookie = {}
+            #New engine of server
+            #inRequest.OpenRPAResponseDict["SetCookies"]["AuthToken"] = lAuthToken
+        else:
+            raise HTTPException(status_code=401, detail="Invalid credentials", headers={})
+    ######################################
+    else:
+        raise HTTPException(status_code=401, detail="Authentication required", headers={'Content-type':'text/html', 'WWW-Authenticate':'Basic'})
+    return True
+
+
+def BackwardCompatibilityWrapper(): # Old from v1.3.1 (updated to FastAPI)
+    lHTTPRequest = HTTPRequestOld()
+    lHTTPRequest.do_GET
+    lHTTPRequest.do_POST
+
+@app.get(path="/", response_class=PlainTextResponse)
+def Hi(t:bool=Depends(IdentifyAuthorize)):
+    return "Hello world"
+
+
+def FastAPI():
+    global gSettingsDict
+    global app
+    lL = gSettingsDict.get("Logger",None)
+
+
+    #lThreadServer = Server.RobotDaemonServer(lItemKeyStr, gSettingsDict)
+    #lThreadServer.start()
+    gSettingsDict["ServerDict"]["ServerThread"] = app
+    app.add_api_route(
+        path="/",
+        endpoint=ServerSettings.pyOpenRPA_Index,
+        response_class=PlainTextResponse
+    )
+
+    #app.get("/", ServerSettings.pyOpenRPA_Index, response_class=PlainTextResponse)
+
+    # Log before starting the server: uvicorn.run() blocks the current thread
+    if lL: lL.info("Модуль сервера FastAPI:: Сервер инициализирован успешно:: Слушает URL: 0.0.0.0, Слушает порт: 1024")
+
+    uvicorn.run('pyOpenRPA.Orchestrator.Server:app', host='0.0.0.0', port=1024)
\ No newline at end of file
diff --git a/Sources/pyOpenRPA/Orchestrator/__Orchestrator__.py b/Sources/pyOpenRPA/Orchestrator/__Orchestrator__.py
index 6550b079..e3223d89 100755
--- a/Sources/pyOpenRPA/Orchestrator/__Orchestrator__.py
+++ b/Sources/pyOpenRPA/Orchestrator/__Orchestrator__.py
@@ -41,6 +41,8 @@ import math
 import glob # search the files
 import urllib
+
+
 #Единый глобальный словарь (За основу взять из Settings.py)
 gSettingsDict = None
@@ -2756,10 +2758,9 @@ def Orchestrator(inGSettings=None, inDumpRestoreBool = True, inRunAsAdministrato
     lListenDict = gSettingsDict.get("ServerDict",{}).get("ListenDict",{})
     for lItemKeyStr in lListenDict:
         lItemDict = lListenDict[lItemKeyStr]
-        lThreadServer = Server.RobotDaemonServer(lItemKeyStr, gSettingsDict)
-        lThreadServer.start()
-        gSettingsDict["ServerDict"]["ServerThread"] = lThreadServer
-        lItemDict["ServerInstance"] = lThreadServer
+        Server.FastAPI()
+
+
     # Init the RobotScreenActive in another thread
     lRobotScreenActiveThread = threading.Thread(target= Monitor.CheckScreen)
diff --git a/Studio/pyOpenRPA.Studio_x64.cmd b/Studio/pyOpenRPA.Studio_x64.cmd
index cca2c1d4..39948a1d 100755
--- a/Studio/pyOpenRPA.Studio_x64.cmd
+++ b/Studio/pyOpenRPA.Studio_x64.cmd
@@ -1,4 +1,5 @@
-cd %~dp0\..\Sources
+chcp 65001
+cd /d "%~dp0\..\Sources"
 copy /Y ..\Resources\WPy64-3720\python-3.7.2.amd64\python.exe ..\Resources\WPy64-3720\python-3.7.2.amd64\OpenRPA_Studio.exe
 .\..\Resources\WPy64-3720\python-3.7.2.amd64\OpenRPA_Studio.exe -m pyOpenRPA.Studio "..\Studio\SettingsStudioExample.py"
 pause >nul
\ No newline at end of file
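
As a sanity check on the handshake helpers in the vendored websockets/utils.py above: accept_key() implements the Sec-WebSocket-Accept computation from RFC 6455 (SHA-1 of the client key concatenated with the fixed GUID, then base64). The snippet below reproduces that computation standalone and verifies it against the worked example in RFC 6455, section 1.3:

    # Standalone check of the Sec-WebSocket-Accept computation from
    # websockets/utils.py, using the worked example in RFC 6455, sec. 1.3.
    import base64
    import hashlib

    GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"

    def accept_key(key: str) -> str:
        # Same computation as websockets/utils.py above.
        sha1 = hashlib.sha1((key + GUID).encode()).digest()
        return base64.b64encode(sha1).decode()

    # Key/accept pair taken verbatim from RFC 6455.
    assert accept_key("dGhlIHNhbXBsZSBub25jZQ==") == "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="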
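The IdentifyAuthorize dependency added to Server.py above follows FastAPI's standard dependency-injection pattern for HTTP Basic auth: read the Authorization header, return the identity on success, and raise HTTPException(401) with a WWW-Authenticate: Basic header so the browser prompts for credentials (the behavior AuthenticateBlock previously implemented by hand on BaseHTTPRequestHandler). A minimal self-contained sketch of the same pattern; the names check_basic_auth and USERS are illustrative and not part of pyOpenRPA:

    # Minimal sketch of the Basic-auth dependency pattern used by
    # IdentifyAuthorize above. check_basic_auth and USERS are hypothetical
    # names for illustration, not pyOpenRPA API.
    import base64
    from typing import Optional

    from fastapi import Depends, FastAPI, Header, HTTPException

    app = FastAPI()
    USERS = {"admin": "secret"}  # demo credential store (illustrative only)

    def check_basic_auth(authorization: Optional[str] = Header(default=None)) -> str:
        # No header or wrong scheme: answer 401 + WWW-Authenticate so the
        # browser shows its login prompt.
        if authorization is None or not authorization.startswith("Basic "):
            raise HTTPException(status_code=401, detail="Authentication required",
                                headers={"WWW-Authenticate": "Basic"})
        decoded = base64.b64decode(authorization.split(" ", 1)[1]).decode("utf-8")
        user, _, password = decoded.partition(":")
        if USERS.get(user) != password:
            raise HTTPException(status_code=401, detail="Invalid credentials",
                                headers={"WWW-Authenticate": "Basic"})
        return user

    @app.get("/")
    def index(user: str = Depends(check_basic_auth)) -> str:
        return f"Hello, {user}!"

Run with uvicorn: a request to / without credentials gets a 401 plus the browser login prompt; with valid credentials the dependency's return value is injected into the endpoint.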
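One behavioral difference worth noting: the removed RobotDaemonServer read AddressStr, PortInt and the PEM certificate/key paths from gSettingsDict["ServerDict"]["ListenDict"] and enabled TLS via ssl.wrap_socket, while the new Server.FastAPI() hard-codes host 0.0.0.0 and port 1024 without TLS. uvicorn exposes equivalent options, so the ListenDict-driven configuration could be restored along these lines (a sketch assuming the old gSettingsDict layout; run_listen_item is a hypothetical helper, not the PR's final API):

    # Sketch: ListenDict-driven address/port/TLS under uvicorn, assuming the
    # gSettingsDict keys that RobotDaemonServer.run() used before this change.
    import uvicorn

    def run_listen_item(gSettingsDict: dict, inNameStr: str) -> None:
        lServerDict = gSettingsDict["ServerDict"]["ListenDict"][inNameStr]
        lCertFilePathStr = lServerDict["CertFilePEMPathStr"] or None
        lKeyFilePathStr = lServerDict["KeyFilePathStr"] or None
        uvicorn.run(
            "pyOpenRPA.Orchestrator.Server:app",
            host=lServerDict["AddressStr"],
            port=lServerDict["PortInt"],
            ssl_certfile=lCertFilePathStr,  # uvicorn serves HTTPS when a certfile is set
            ssl_keyfile=lKeyFilePathStr,
        )

Unlike the old Thread subclass, uvicorn.run() blocks the calling thread, so serving several ListenDict entries concurrently would still need one thread per entry (e.g. threading.Thread(target=run_listen_item, args=(gSettingsDict, lItemKeyStr))).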