Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1854496: update vendored aiohttp/requests and add new dependencies r=firefox-build-system-reviewers,mach-reviewers,ahochheiden
Update `aiohttp` to version 3.8.5 and `requests` to version 2.31.0, and vendor their respective dependencies. Add all the new dependencies to the various required site virtualenv requirements files.

Differential Revision: https://phabricator.services.mozilla.com/D188904
Parent: 6355de0665
Commit: fcfe38a1ef
@@ -10,11 +10,14 @@ vendored:testing/web-platform/tests/tools/wptserve
vendored:third_party/python/Jinja2
vendored:third_party/python/PyYAML/lib/
vendored:third_party/python/aiohttp
vendored:third_party/python/aiosignal
vendored:third_party/python/appdirs
vendored:third_party/python/arrow
vendored:third_party/python/async_timeout
vendored:third_party/python/asynctest
vendored:third_party/python/binaryornot
vendored:third_party/python/chardet
vendored:third_party/python/charset_normalizer
vendored:third_party/python/compare_locales
vendored:third_party/python/cookiecutter
vendored:third_party/python/diskcache
@@ -22,6 +25,7 @@ vendored:third_party/python/dlmanager
vendored:third_party/python/ecdsa
vendored:third_party/python/fluent.migrate
vendored:third_party/python/fluent.syntax
vendored:third_party/python/frozenlist
vendored:third_party/python/giturlparse
vendored:third_party/python/glean_parser
vendored:third_party/python/gyp/pylib
@@ -9,12 +9,15 @@ vendored:testing/web-platform/tests/tools/wptserve
vendored:third_party/python/MarkupSafe/src
vendored:third_party/python/PyYAML/lib/
vendored:third_party/python/aiohttp
vendored:third_party/python/aiosignal
vendored:third_party/python/appdirs
vendored:third_party/python/arrow
vendored:third_party/python/async_timeout
vendored:third_party/python/asynctest
vendored:third_party/python/binaryornot
vendored:third_party/python/cbor2
vendored:third_party/python/chardet
vendored:third_party/python/charset_normalizer
vendored:third_party/python/compare_locales
vendored:third_party/python/cookiecutter
vendored:third_party/python/cookies
@@ -25,6 +28,7 @@ vendored:third_party/python/ecdsa
vendored:third_party/python/esprima
vendored:third_party/python/fluent.migrate
vendored:third_party/python/fluent.syntax
vendored:third_party/python/frozenlist
vendored:third_party/python/giturlparse
vendored:third_party/python/glean_parser
vendored:third_party/python/gyp/pylib
@@ -49,14 +49,18 @@ vendored:third_party/python/Jinja2
vendored:third_party/python/MarkupSafe/src
vendored:third_party/python/PyYAML/lib/
vendored:third_party/python/aiohttp
vendored:third_party/python/aiosignal
vendored:third_party/python/appdirs
vendored:third_party/python/arrow
vendored:third_party/python/async_timeout
vendored:third_party/python/asynctest
vendored:third_party/python/binaryornot
vendored:third_party/python/certifi
vendored:third_party/python/chardet
vendored:third_party/python/charset_normalizer
vendored:third_party/python/cookiecutter
vendored:third_party/python/fluent.syntax
vendored:third_party/python/frozenlist
vendored:third_party/python/giturlparse
vendored:third_party/python/jinja2_time
vendored:third_party/python/json-e
@@ -1,14 +1,18 @@
vendored:third_party/python/PyYAML/lib/
vendored:third_party/python/aiohttp
vendored:third_party/python/aiosignal
vendored:third_party/python/appdirs
vendored:third_party/python/arrow
vendored:third_party/python/async_timeout
vendored:third_party/python/asynctest
vendored:third_party/python/binaryornot
vendored:third_party/python/chardet
vendored:third_party/python/charset_normalizer
vendored:third_party/python/compare_locales
vendored:third_party/python/cookiecutter
vendored:third_party/python/esprima
vendored:third_party/python/fluent.syntax
vendored:third_party/python/frozenlist
vendored:third_party/python/giturlparse
vendored:third_party/python/jinja2_time
vendored:third_party/python/json-e
@@ -61,6 +61,7 @@ vendored:third_party/python/attrs
vendored:third_party/python/blessed
vendored:third_party/python/certifi
vendored:third_party/python/chardet
vendored:third_party/python/charset_normalizer
vendored:third_party/python/click
vendored:third_party/python/colorama
vendored:third_party/python/distro
@@ -10,12 +10,15 @@ vendored:testing/web-platform/tests/tools/wptserve
vendored:third_party/python/MarkupSafe/src
vendored:third_party/python/PyYAML/lib/
vendored:third_party/python/aiohttp
vendored:third_party/python/aiosignal
vendored:third_party/python/appdirs
vendored:third_party/python/arrow
vendored:third_party/python/async_timeout
vendored:third_party/python/asynctest
vendored:third_party/python/binaryornot
vendored:third_party/python/cbor2
vendored:third_party/python/chardet
vendored:third_party/python/charset_normalizer
vendored:third_party/python/compare_locales
vendored:third_party/python/cookiecutter
vendored:third_party/python/cookies
@@ -26,6 +29,7 @@ vendored:third_party/python/ecdsa
vendored:third_party/python/esprima
vendored:third_party/python/fluent.migrate
vendored:third_party/python/fluent.syntax
vendored:third_party/python/frozenlist
vendored:third_party/python/giturlparse
vendored:third_party/python/glean_parser
vendored:third_party/python/gyp/pylib
@@ -1,7 +1,3 @@
=========
Changelog
=========

..
    You should *NOT* be adding new change log entries to this file, this
    file is managed by towncrier. You *may* edit previous change logs to
@@ -14,6 +10,396 @@ Changelog

.. towncrier release notes start

3.8.5 (2023-07-19)
==================

Security bugfixes
-----------------

- Upgraded the vendored copy of llhttp_ to v8.1.1 -- by :user:`webknjaz`
  and :user:`Dreamsorcerer`.

  Thanks to :user:`sethmlarson` for reporting this and providing us with
  a comprehensive reproducer, workarounds and fixing details! For more
  information, see
  https://github.com/aio-libs/aiohttp/security/advisories/GHSA-45c4-8wx5-qw6w.

  .. _llhttp: https://llhttp.org

  `#7346 <https://github.com/aio-libs/aiohttp/issues/7346>`_


Features
--------

- Added information to C parser exceptions to show which character caused the error. -- by :user:`Dreamsorcerer`

  `#7366 <https://github.com/aio-libs/aiohttp/issues/7366>`_


Bugfixes
--------

- Fixed a transport is :data:`None` error -- by :user:`Dreamsorcerer`.

  `#3355 <https://github.com/aio-libs/aiohttp/issues/3355>`_


----


3.8.4 (2023-02-12)
==================

Bugfixes
--------

- Fixed incorrectly overwriting cookies that have the same name and domain but a different path.
  `#6638 <https://github.com/aio-libs/aiohttp/issues/6638>`_
- Fixed ``ConnectionResetError`` not being raised after client disconnection in SSL environments.
  `#7180 <https://github.com/aio-libs/aiohttp/issues/7180>`_


----


3.8.3 (2022-09-21)
==================

.. attention::

   This is the last :doc:`aiohttp <index>` release tested under
   Python 3.6. The 3.9 stream is dropping it from the CI and the
   distribution package metadata.

Bugfixes
--------

- Increased the upper boundary of the :doc:`multidict:index` dependency
  to allow for version 6 -- by :user:`hugovk`.

  It used to be limited below version 7 in :doc:`aiohttp <index>` v3.8.1 but
  was lowered in v3.8.2 via :pr:`6550` and never brought back, causing
  problems with dependency pins when upgrading. :doc:`aiohttp <index>` v3.8.3
  fixes that by recovering the original boundary of ``< 7``.
  `#6950 <https://github.com/aio-libs/aiohttp/issues/6950>`_


----


3.8.2 (2022-09-20, subsequently yanked on 2022-09-21)
=====================================================

.. note::

   This release has some compatibility fixes for Python 3.11 but it may
   still have some quirks. Some tests are still flaky in the CI.

.. caution::

   This release has been yanked from PyPI. Modern pip will not pick it
   up automatically. The reason is that it has ``multidict < 6`` set in
   the distribution package metadata (see :pr:`6950`). Please use
   ``aiohttp ~= 3.8.3, != 3.8.1`` instead, if you can.

Bugfixes
--------

- Added support for registering :rfc:`OPTIONS <9110#OPTIONS>`
  HTTP method handlers via :py:class:`~aiohttp.web.RouteTableDef`;
  see the sketch after this list.
  `#4663 <https://github.com/aio-libs/aiohttp/issues/4663>`_
- Started supporting :rfc:`authority-form <9112#authority-form>` and
  :rfc:`absolute-form <9112#absolute-form>` URLs on the server-side.
  `#6227 <https://github.com/aio-libs/aiohttp/issues/6227>`_
- Fixed Python 3.11 incompatibilities by using Cython 0.29.25.
  `#6396 <https://github.com/aio-libs/aiohttp/issues/6396>`_
- Extended the ``sock`` argument typing declaration of the
  :py:func:`~aiohttp.web.run_app` function as optionally
  accepting iterables.
  `#6401 <https://github.com/aio-libs/aiohttp/issues/6401>`_
- Fixed a regression where :py:exc:`~asyncio.CancelledError`
  occurs on client disconnection.
  `#6719 <https://github.com/aio-libs/aiohttp/issues/6719>`_
- Started exporting :py:class:`~aiohttp.web.PrefixedSubAppResource`
  under :py:mod:`aiohttp.web` -- by :user:`Dreamsorcerer`.

  This fixes a regression introduced by :pr:`3469`.
  `#6889 <https://github.com/aio-libs/aiohttp/issues/6889>`_
- Dropped the :class:`object` type possibility from
  the :py:attr:`aiohttp.ClientSession.timeout`
  property return type declaration.
  `#6917 <https://github.com/aio-libs/aiohttp/issues/6917>`_,
  `#6923 <https://github.com/aio-libs/aiohttp/issues/6923>`_
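A minimal sketch of the new ``OPTIONS`` registration mentioned above; the route path and response headers are illustrative, not taken from the changelog:

    from aiohttp import web

    routes = web.RouteTableDef()

    @routes.options("/resource")  # OPTIONS registration via RouteTableDef, new in 3.8.2
    async def resource_options(request: web.Request) -> web.Response:
        # Advertise which methods this endpoint supports.
        return web.Response(headers={"Allow": "GET, OPTIONS"})

    app = web.Application()
    app.add_routes(routes)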

Improved Documentation
----------------------

- Added clarification on configuring the app object with
  settings such as a database connection.
  `#4137 <https://github.com/aio-libs/aiohttp/issues/4137>`_
- Extended the ``sock`` argument typing declaration of the
  :py:func:`~aiohttp.web.run_app` function as optionally
  accepting iterables.
  `#6401 <https://github.com/aio-libs/aiohttp/issues/6401>`_
- Dropped the :class:`object` type possibility from
  the :py:attr:`aiohttp.ClientSession.timeout`
  property return type declaration.
  `#6917 <https://github.com/aio-libs/aiohttp/issues/6917>`_,
  `#6923 <https://github.com/aio-libs/aiohttp/issues/6923>`_


Deprecations and Removals
-------------------------

- Dropped Python 3.5 support; :doc:`aiohttp <index>` only works
  under Python 3.6 and higher from now on.
  `#4046 <https://github.com/aio-libs/aiohttp/issues/4046>`_


Misc
----

- Removed a deprecated usage of :py:func:`pytest.warns(None)
  <pytest.warns>` in tests.
  `#6663 <https://github.com/aio-libs/aiohttp/issues/6663>`_
- `#6369 <https://github.com/aio-libs/aiohttp/issues/6369>`_, `#6399 <https://github.com/aio-libs/aiohttp/issues/6399>`_, `#6550 <https://github.com/aio-libs/aiohttp/issues/6550>`_, `#6708 <https://github.com/aio-libs/aiohttp/issues/6708>`_, `#6757 <https://github.com/aio-libs/aiohttp/issues/6757>`_, `#6857 <https://github.com/aio-libs/aiohttp/issues/6857>`_, `#6872 <https://github.com/aio-libs/aiohttp/issues/6872>`_.


----


3.8.1 (2021-11-14)
==================

Bugfixes
--------

- Fixed an error in handling the return value of ``getaddrinfo``.
  ``getaddrinfo`` returns an ``(int, bytes)`` tuple if CPython could not handle the address family.
  This caused an index-out-of-range error in aiohttp, for example when CPython was compiled with
  the ``--disable-ipv6`` option but the system has IPv6 enabled.
  `#5901 <https://github.com/aio-libs/aiohttp/issues/5901>`_
- Do not install "examples" as a top-level package.
  `#6189 <https://github.com/aio-libs/aiohttp/issues/6189>`_
- Restored the ability to connect to IPv6-only hosts.
  `#6195 <https://github.com/aio-libs/aiohttp/issues/6195>`_
- Removed ``Signal`` from ``__all__``; replaced ``aiohttp.Signal`` with ``aiosignal.Signal`` in docs.
  `#6201 <https://github.com/aio-libs/aiohttp/issues/6201>`_
- Made the chunked encoding HTTP header check stricter.
  `#6305 <https://github.com/aio-libs/aiohttp/issues/6305>`_


Improved Documentation
----------------------

- Updated the quick start demo code.
  `#6240 <https://github.com/aio-libs/aiohttp/issues/6240>`_
- Added an explanation of how tiny timeouts affect performance to the client reference document.
  `#6274 <https://github.com/aio-libs/aiohttp/issues/6274>`_
- Added flake8-docstrings to the flake8 configuration and enabled a subset of checks.
  `#6276 <https://github.com/aio-libs/aiohttp/issues/6276>`_
- Added information on running complex applications with additional tasks/processes -- :user:`Dreamsorcerer`.
  `#6278 <https://github.com/aio-libs/aiohttp/issues/6278>`_


Misc
----

- `#6205 <https://github.com/aio-libs/aiohttp/issues/6205>`_


----


3.8.0 (2021-10-31)
==================

Features
--------

- Added a ``GunicornWebWorker`` feature for extending the aiohttp server configuration by allowing the 'wsgi' coroutine to return a ``web.AppRunner`` object.
  `#2988 <https://github.com/aio-libs/aiohttp/issues/2988>`_
- Switched from ``http-parser`` to ``llhttp``.
  `#3561 <https://github.com/aio-libs/aiohttp/issues/3561>`_
- Use Brotli instead of brotlipy.
  `#3803 <https://github.com/aio-libs/aiohttp/issues/3803>`_
- Disabled the implicit switch-back to pure-Python mode. The build now fails loudly if aiohttp
  cannot be compiled with C accelerators. Use ``AIOHTTP_NO_EXTENSIONS=1`` to explicitly
  disable C extension compilation and switch to pure-Python mode. Note that pure-Python
  mode is significantly slower than the compiled one.
  `#3828 <https://github.com/aio-libs/aiohttp/issues/3828>`_
- Made the access log use local time with timezone.
  `#3853 <https://github.com/aio-libs/aiohttp/issues/3853>`_
- Implemented ``readuntil`` in ``StreamResponse``.
  `#4054 <https://github.com/aio-libs/aiohttp/issues/4054>`_
- ``FileResponse`` now supports ETag.
  `#4594 <https://github.com/aio-libs/aiohttp/issues/4594>`_
- Added a request handler type alias ``aiohttp.typedefs.Handler``.
  `#4686 <https://github.com/aio-libs/aiohttp/issues/4686>`_
- ``AioHTTPTestCase`` is more async friendly now; see the sketch after this list.

  For people who use unittest and are used to :py:class:`~unittest.TestCase`,
  it is now easier to write new test cases in the style of the sync
  :py:class:`~unittest.TestCase` class: just ``async def test_*``, without the
  ``@unittest_run_loop`` decorator. The only difference is that people using
  Python 3.7 and below need a new dependency, ``asynctest``.
  `#4700 <https://github.com/aio-libs/aiohttp/issues/4700>`_
- Added validation of HTTP header keys and values to prevent header injection.
  `#4818 <https://github.com/aio-libs/aiohttp/issues/4818>`_
- Added a predicate to ``AbstractCookieJar.clear``.
  Added ``AbstractCookieJar.clear_domain`` to clean cookies of a domain and its
  subdomains only; see the cookie-jar sketch after this list.
  `#4942 <https://github.com/aio-libs/aiohttp/issues/4942>`_
- Added a ``keepalive_timeout`` parameter to ``web.run_app``.
  `#5094 <https://github.com/aio-libs/aiohttp/issues/5094>`_
- Tracing for client-sent headers.
  `#5105 <https://github.com/aio-libs/aiohttp/issues/5105>`_
- Made type hints for the http parser stricter.
  `#5267 <https://github.com/aio-libs/aiohttp/issues/5267>`_
- Added final declarations for constants.
  `#5275 <https://github.com/aio-libs/aiohttp/issues/5275>`_
- Switched to the external frozenlist and aiosignal libraries.
  `#5293 <https://github.com/aio-libs/aiohttp/issues/5293>`_
- Don't send secure cookies over insecure transports.

  By default, the transport is secure if the https or wss scheme is used.
  Use ``CookieJar(treat_as_secure_origin="http://127.0.0.1")`` to override the
  default security checker; see the cookie-jar sketch after this list.
  `#5571 <https://github.com/aio-libs/aiohttp/issues/5571>`_
- Always create a new event loop in ``aiohttp.web.run_app()``.
  This adds better compatibility with ``asyncio.run()`` and with running multiple apps in sequence.
  `#5572 <https://github.com/aio-libs/aiohttp/issues/5572>`_
- Added ``aiohttp.pytest_plugin.AiohttpClient`` for static typing of the pytest plugin.
  `#5585 <https://github.com/aio-libs/aiohttp/issues/5585>`_
- Added a ``socket_factory`` argument to ``BaseTestServer``.
  `#5844 <https://github.com/aio-libs/aiohttp/issues/5844>`_
- Added a compression strategy parameter to the ``enable_compression`` method.
  `#5909 <https://github.com/aio-libs/aiohttp/issues/5909>`_
- Added support for Python 3.10 to the GitHub Actions CI/CD workflows and fixed the related deprecation warnings -- :user:`Hanaasagi`.
  `#5927 <https://github.com/aio-libs/aiohttp/issues/5927>`_
- Switched from ``chardet`` to ``charset-normalizer`` for guessing the HTTP payload body encoding -- :user:`Ousret`.
  `#5930 <https://github.com/aio-libs/aiohttp/issues/5930>`_
- Added an optional ``auto_decompress`` argument for ``HttpRequestParser``.
  `#5957 <https://github.com/aio-libs/aiohttp/issues/5957>`_
- Added support for HTTPS proxies to the extent CPython's
  :py:mod:`asyncio` supports it -- by :user:`bmbouter`,
  :user:`jborean93` and :user:`webknjaz`.
  `#5992 <https://github.com/aio-libs/aiohttp/issues/5992>`_
- Added a ``base_url`` parameter to the initializer of :class:`~aiohttp.ClientSession`; see the sketch after this list.
  `#6013 <https://github.com/aio-libs/aiohttp/issues/6013>`_
- Added a Trove classifier and created binary wheels for 3.10 -- :user:`hugovk`.
  `#6079 <https://github.com/aio-libs/aiohttp/issues/6079>`_
- Started shipping platform-specific wheels with the ``musl`` tag targeting typical Alpine Linux runtimes — :user:`asvetlov`.
  `#6139 <https://github.com/aio-libs/aiohttp/issues/6139>`_
- Started shipping platform-specific arm64 wheels for Apple Silicon — :user:`asvetlov`.
  `#6139 <https://github.com/aio-libs/aiohttp/issues/6139>`_
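To make the decorator-free ``AioHTTPTestCase`` style above concrete, a small sketch; the app and assertions are illustrative:

    from aiohttp import web
    from aiohttp.test_utils import AioHTTPTestCase

    class HelloAppTestCase(AioHTTPTestCase):
        async def get_application(self) -> web.Application:
            async def hello(request: web.Request) -> web.Response:
                return web.Response(text="Hello, world")

            app = web.Application()
            app.router.add_get("/", hello)
            return app

        # A plain ``async def test_*`` method; no @unittest_run_loop decorator needed.
        async def test_hello(self) -> None:
            async with self.client.request("GET", "/") as resp:
                assert resp.status == 200
                assert "Hello, world" in await resp.text()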
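A cookie-jar sketch covering the ``clear`` predicate, ``clear_domain``, and the ``treat_as_secure_origin`` override from the entries above; the cookie name, domain, and origin are placeholders:

    from aiohttp import CookieJar

    # Treat plain-HTTP localhost as a secure origin so secure cookies are sent to it.
    jar = CookieJar(treat_as_secure_origin="http://127.0.0.1")

    # Clear only the cookies matching a predicate (here: cookies named "session").
    jar.clear(lambda morsel: morsel.key == "session")

    # Clear all cookies for example.com and its subdomains.
    jar.clear_domain("example.com")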
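And a sketch of the new ``ClientSession(base_url=...)`` parameter; the host and path are placeholders:

    import asyncio
    import aiohttp

    async def main() -> None:
        # Relative request paths are resolved against base_url.
        async with aiohttp.ClientSession(base_url="http://example.com") as session:
            async with session.get("/resource") as resp:
                print(resp.status)

    asyncio.run(main())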

Bugfixes
--------

- Modified ``_drain_helper()`` to handle concurrent ``await resp.write(...)`` or ``ws.send_json(...)`` calls without a race condition.
  `#2934 <https://github.com/aio-libs/aiohttp/issues/2934>`_
- Started using ``MultiLoopChildWatcher`` when it's available under POSIX while setting up the test I/O loop.
  `#3450 <https://github.com/aio-libs/aiohttp/issues/3450>`_
- Only encode the content-disposition filename parameter using percent-encoding.
  Other parameters are encoded to quoted-string or RFC 2231 extended parameter
  value.
  `#4012 <https://github.com/aio-libs/aiohttp/issues/4012>`_
- Fixed HTTP client requests to honor ``no_proxy`` environment variables.
  `#4431 <https://github.com/aio-libs/aiohttp/issues/4431>`_
- Fixed support for WebSocket proxies configured via environment variables.
  `#4648 <https://github.com/aio-libs/aiohttp/issues/4648>`_
- Changed the return type of ``URLDispatcher`` to ``UrlMappingMatchInfo`` to improve type annotations.
  `#4748 <https://github.com/aio-libs/aiohttp/issues/4748>`_
- Ensured a cleanup context is cleaned up even when an exception occurs during startup.
  `#4799 <https://github.com/aio-libs/aiohttp/issues/4799>`_
- Added a new exception type for Unix socket client errors which provides a more useful error message.
  `#4984 <https://github.com/aio-libs/aiohttp/issues/4984>`_
- Removed the Transfer-Encoding and Content-Type headers for 204 responses in ``StreamResponse``.
  `#5106 <https://github.com/aio-libs/aiohttp/issues/5106>`_
- Only depend on typing_extensions for Python < 3.8.
  `#5107 <https://github.com/aio-libs/aiohttp/issues/5107>`_
- Added ``ABNORMAL_CLOSURE`` and ``BAD_GATEWAY`` to ``WSCloseCode``.
  `#5192 <https://github.com/aio-libs/aiohttp/issues/5192>`_
- Fixed cookies disappearing from ``HTTPExceptions``.
  `#5233 <https://github.com/aio-libs/aiohttp/issues/5233>`_
- ``StaticResource`` prefixes no longer match URLs with a non-folder prefix. For example, ``routes.static('/foo', '/foo')`` no longer matches the URL ``/foobar``. Previously, this would attempt to load the file ``/foo/ar``.
  `#5250 <https://github.com/aio-libs/aiohttp/issues/5250>`_
- Acquire the connection before running traces to prevent a race condition.
  `#5259 <https://github.com/aio-libs/aiohttp/issues/5259>`_
- Added missing slots to ``_RequestContextManager`` and ``_WSRequestContextManager``.
  `#5329 <https://github.com/aio-libs/aiohttp/issues/5329>`_
- Ensured sending a zero-byte file does not throw an exception (round 2).
  `#5380 <https://github.com/aio-libs/aiohttp/issues/5380>`_
- Set "text/plain" when data is an empty string in client requests.
  `#5392 <https://github.com/aio-libs/aiohttp/issues/5392>`_
- Stopped automatically releasing the ``ClientResponse`` object on calls to the ``ok`` property for failed requests.
  `#5403 <https://github.com/aio-libs/aiohttp/issues/5403>`_
- Included query parameters from the ``params`` keyword argument in the tracing ``URL``.
  `#5432 <https://github.com/aio-libs/aiohttp/issues/5432>`_
- Fixed annotations.
  `#5466 <https://github.com/aio-libs/aiohttp/issues/5466>`_
- Fixed multipart POST request processing to always release file
  descriptors for the ``tempfile.TemporaryFile``-created
  ``_io.BufferedRandom`` instances of files sent within multipart request
  bodies via HTTP POST requests -- by :user:`webknjaz`.
  `#5494 <https://github.com/aio-libs/aiohttp/issues/5494>`_
- Fixed 0 being incorrectly treated as an immediate timeout.
  `#5527 <https://github.com/aio-libs/aiohttp/issues/5527>`_
- Fixed failing tests when an environment variable ``<scheme>_proxy`` is set.
  `#5554 <https://github.com/aio-libs/aiohttp/issues/5554>`_
- Replaced the deprecated app handler design in ``tests/autobahn/server.py`` with a call to ``web.run_app``; replaced deprecated ``aiohttp.ws_connect`` calls in ``tests/autobahn/client.py`` with ``aiohttp.ClientSession.ws_connect``.
  `#5606 <https://github.com/aio-libs/aiohttp/issues/5606>`_
- Fixed a test for ``HTTPUnauthorized`` that accessed the ``text`` argument. This is not used in any part of the code, so it's removed now.
  `#5657 <https://github.com/aio-libs/aiohttp/issues/5657>`_
- Removed an incorrect default from the docs.
  `#5727 <https://github.com/aio-libs/aiohttp/issues/5727>`_
- Removed the external test dependency on http://httpbin.org.
  `#5840 <https://github.com/aio-libs/aiohttp/issues/5840>`_
- Don't cancel the current task when entering a cancelled timer.
  `#5853 <https://github.com/aio-libs/aiohttp/issues/5853>`_
- Added a ``params`` keyword argument to ``ClientSession.ws_connect`` -- :user:`hoh`; see the sketch after this list.
  `#5868 <https://github.com/aio-libs/aiohttp/issues/5868>`_
- Use :py:class:`~asyncio.ThreadedChildWatcher` under POSIX to allow setting up the test loop in a non-main thread.
  `#5877 <https://github.com/aio-libs/aiohttp/issues/5877>`_
- Fixed an error in handling the return value of ``getaddrinfo``.
  ``getaddrinfo`` returns an ``(int, bytes)`` tuple if CPython could not handle the address family.
  This caused an index-out-of-range error in aiohttp, for example when CPython was compiled with
  the ``--disable-ipv6`` option but the system has IPv6 enabled.
  `#5901 <https://github.com/aio-libs/aiohttp/issues/5901>`_
- Removed the deprecated ``loop`` argument from the ``asyncio.sleep``/``gather`` calls.
  `#5905 <https://github.com/aio-libs/aiohttp/issues/5905>`_
- Return ``None`` from ``request.if_modified_since``, ``request.if_unmodified_since``, ``request.if_range`` and ``response.last_modified`` when the corresponding http date headers are invalid.
  `#5925 <https://github.com/aio-libs/aiohttp/issues/5925>`_
- Fixed resetting ``SIGCHLD`` signals in the Gunicorn aiohttp worker so that ``subprocesses`` that capture output no longer get an incorrect ``returncode``.
  `#6130 <https://github.com/aio-libs/aiohttp/issues/6130>`_
- Raise ``400: Content-Length can't be present with Transfer-Encoding`` if both ``Content-Length`` and ``Transfer-Encoding`` are sent by the peer, in both the C and Python implementations.
  `#6182 <https://github.com/aio-libs/aiohttp/issues/6182>`_
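A sketch of the ``params`` argument to ``ws_connect`` mentioned above; the URL and token are placeholders:

    import asyncio
    import aiohttp

    async def main() -> None:
        async with aiohttp.ClientSession() as session:
            # ``params`` is encoded into the URL query string, as with regular requests.
            async with session.ws_connect("ws://example.com/ws", params={"token": "abc"}) as ws:
                await ws.send_str("hello")

    asyncio.run(main())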

Improved Documentation
----------------------

- Refactored the OpenAPI/Swagger aiohttp addons, added ``aio-openapi``.
  `#5326 <https://github.com/aio-libs/aiohttp/issues/5326>`_
- Fixed the docs on the request cookies type so it matches what is actually used in the code (a
  read-only dictionary-like object).
  `#5725 <https://github.com/aio-libs/aiohttp/issues/5725>`_
- Documented that the HTTP client ``Authorization`` header is removed
  on redirects to a different host or protocol.
  `#5850 <https://github.com/aio-libs/aiohttp/issues/5850>`_


Misc
----

- `#3927 <https://github.com/aio-libs/aiohttp/issues/3927>`_, `#4247 <https://github.com/aio-libs/aiohttp/issues/4247>`_, `#5389 <https://github.com/aio-libs/aiohttp/issues/5389>`_, `#5457 <https://github.com/aio-libs/aiohttp/issues/5457>`_, `#5486 <https://github.com/aio-libs/aiohttp/issues/5486>`_, `#5494 <https://github.com/aio-libs/aiohttp/issues/5494>`_, `#5515 <https://github.com/aio-libs/aiohttp/issues/5515>`_, `#5625 <https://github.com/aio-libs/aiohttp/issues/5625>`_, `#5635 <https://github.com/aio-libs/aiohttp/issues/5635>`_, `#5648 <https://github.com/aio-libs/aiohttp/issues/5648>`_, `#5657 <https://github.com/aio-libs/aiohttp/issues/5657>`_, `#5890 <https://github.com/aio-libs/aiohttp/issues/5890>`_, `#5914 <https://github.com/aio-libs/aiohttp/issues/5914>`_, `#5932 <https://github.com/aio-libs/aiohttp/issues/5932>`_, `#6002 <https://github.com/aio-libs/aiohttp/issues/6002>`_, `#6045 <https://github.com/aio-libs/aiohttp/issues/6045>`_, `#6131 <https://github.com/aio-libs/aiohttp/issues/6131>`_, `#6156 <https://github.com/aio-libs/aiohttp/issues/6156>`_, `#6165 <https://github.com/aio-libs/aiohttp/issues/6165>`_, `#6166 <https://github.com/aio-libs/aiohttp/issues/6166>`_


----


3.7.4.post0 (2021-03-06)
========================
@@ -3,9 +3,11 @@
A. Jesse Jiryu Davis
Adam Bannister
Adam Cooper
Adam Horacek
Adam Mills
Adrian Krupa
Adrián Chaves
Ahmed Tahri
Alan Tse
Alec Hanefeld
Alejandro Gómez
@@ -30,6 +32,7 @@ Alexey Stepanov
Amin Etesamian
Amit Tulshyan
Amy Boyle
Anas El Amraoui
Anders Melchiorsen
Andrei Ursulenko
Andrej Antonov
@@ -38,21 +41,27 @@ Andrew Lytvyn
Andrew Svetlov
Andrew Zhou
Andrii Soldatenko
Anes Abismail
Antoine Pietri
Anton Kasyanov
Anton Zhdan-Pushkin
Arseny Timoniq
Artem Yushkovskiy
Arthur Darcet
Austin Scola
Ben Bader
Ben Greiner
Ben Timby
Benedikt Reinartz
Bob Haddleton
Boris Feld
Boyi Chen
Brett Cannon
Brian Bouterse
Brian C. Lane
Brian Muller
Bruce Merry
Bruno Souza Cabral
Bryan Kok
Bryce Drennan
Carl George
@@ -67,6 +76,7 @@ Claudiu Popa
Colin Dunklau
Cong Xu
Damien Nadé
Dan King
Dan Xu
Daniel García
Daniel Grossmann-Kavanagh
@@ -76,6 +86,7 @@ David Bibb
David Michael Brown
Denilson Amorim
Denis Matiychuk
Denis Moshensky
Dennis Kliban
Dima Veselov
Dimitar Dimitrov
@@ -106,6 +117,7 @@ Felix Yan
Fernanda Guimarães
FichteFoll
Florian Scheffler
Franek Magiera
Frederik Gladhorn
Frederik Peter Aalund
Gabriel Tremblay
@@ -119,10 +131,13 @@ Gustavo Carneiro
Günther Jena
Hans Adema
Harmon Y.
Harry Liu
Hiroshi Ogawa
Hrishikesh Paranjape
Hu Bo
Hugh Young
Hugo Herter
Hugo van Kemenade
Hynek Schlawack
Igor Alexandrov
Igor Davydenko
@@ -137,6 +152,7 @@ Jaesung Lee
Jake Davis
Jakob Ackermann
Jakub Wilk
Jan Buchar
Jashandeep Sohi
Jens Steinhauser
Jeonghun Lee
@@ -152,6 +168,7 @@ Jonas Obrist
Jonathan Wright
Jonny Tan
Joongi Kim
Jordan Borean
Josep Cugat
Josh Junon
Joshu Coats
@@ -186,6 +203,8 @@ Manuel Miranda
Marat Sharafutdinov
Marco Paolini
Mariano Anaya
Mariusz Masztalerczuk
Marko Kohtala
Martijn Pieters
Martin Melka
Martin Richard
@@ -193,6 +212,7 @@ Mathias Fröjdman
Mathieu Dugré
Matthieu Hauglustaine
Matthieu Rigal
Meet Mangukiya
Michael Ihnatenko
Michał Górny
Mikhail Burshteyn
@@ -208,6 +228,8 @@ Navid Sheikhol
Nicolas Braem
Nikolay Kim
Nikolay Novik
Nikolay Tiunov
Nándor Mátravölgyi
Oisin Aylward
Olaf Conradi
Pahaz Blinov
@@ -219,11 +241,14 @@ Paulius Šileikis
Paulus Schoutsen
Pavel Kamaev
Pavel Polyakov
Pavel Sapezhko
Pavol Vargovčík
Pawel Kowalski
Pawel Miech
Pepe Osca
Philipp A.
Pieter van Beek
Qiao Han
Rafael Viotti
Raphael Bialon
Raúl Cumplido
@@ -250,6 +275,7 @@ Stanislav Prokop
Stefan Tjarks
Stepan Pletnev
Stephan Jaensch
Stephen Cirelli
Stephen Granade
Steven Seguin
Sunghyun Hwang
@@ -292,6 +318,7 @@ Vladyslav Bondar
W. Trevor King
Wei Lin
Weiwei Wang
Will Fatherley
Will McGugan
Willem de Groot
William Grzybowski
@@ -305,8 +332,11 @@ Yegor Roganov
Yifei Kong
Young-Ho Cha
Yuriy Shatrov
Yury Pliner
Yury Selivanov
Yusuke Tsutsumi
Yuval Ofir
Zeal Wierslee
Zlatan Sičanica
Марк Коренберг
Семён Марьясин
@@ -1,192 +1,4 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

   "License" shall mean the terms and conditions for use, reproduction,
   and distribution as defined by Sections 1 through 9 of this document.

   "Licensor" shall mean the copyright owner or entity authorized by
   the copyright owner that is granting the License.

   "Legal Entity" shall mean the union of the acting entity and all
   other entities that control, are controlled by, or are under common
   control with that entity. For the purposes of this definition,
   "control" means (i) the power, direct or indirect, to cause the
   direction or management of such entity, whether by contract or
   otherwise, or (ii) ownership of fifty percent (50%) or more of the
   outstanding shares, or (iii) beneficial ownership of such entity.

   "You" (or "Your") shall mean an individual or Legal Entity
   exercising permissions granted by this License.

   "Source" form shall mean the preferred form for making modifications,
   including but not limited to software source code, documentation
   source, and configuration files.

   "Object" form shall mean any form resulting from mechanical
   transformation or translation of a Source form, including but
   not limited to compiled object code, generated documentation,
   and conversions to other media types.

   "Work" shall mean the work of authorship, whether in Source or
   Object form, made available under the License, as indicated by a
   copyright notice that is included in or attached to the work
   (an example is provided in the Appendix below).

   "Derivative Works" shall mean any work, whether in Source or Object
   form, that is based on (or derived from) the Work and for which the
   editorial revisions, annotations, elaborations, or other modifications
   represent, as a whole, an original work of authorship. For the purposes
   of this License, Derivative Works shall not include works that remain
   separable from, or merely link (or bind by name) to the interfaces of,
   the Work and Derivative Works thereof.

   "Contribution" shall mean any work of authorship, including
   the original version of the Work and any modifications or additions
   to that Work or Derivative Works thereof, that is intentionally
   submitted to Licensor for inclusion in the Work by the copyright owner
   or by an individual or Legal Entity authorized to submit on behalf of
   the copyright owner. For the purposes of this definition, "submitted"
   means any form of electronic, verbal, or written communication sent
   to the Licensor or its representatives, including but not limited to
   communication on electronic mailing lists, source code control systems,
   and issue tracking systems that are managed by, or on behalf of, the
   Licensor for the purpose of discussing and improving the Work, but
   excluding communication that is conspicuously marked or otherwise
   designated in writing by the copyright owner as "Not a Contribution."

   "Contributor" shall mean Licensor and any individual or Legal Entity
   on behalf of whom a Contribution has been received by Licensor and
   subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
   this License, each Contributor hereby grants to You a perpetual,
   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
   copyright license to reproduce, prepare Derivative Works of,
   publicly display, publicly perform, sublicense, and distribute the
   Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
   this License, each Contributor hereby grants to You a perpetual,
   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
   (except as stated in this section) patent license to make, have made,
   use, offer to sell, sell, import, and otherwise transfer the Work,
   where such license applies only to those patent claims licensable
   by such Contributor that are necessarily infringed by their
   Contribution(s) alone or by combination of their Contribution(s)
   with the Work to which such Contribution(s) was submitted. If You
   institute patent litigation against any entity (including a
   cross-claim or counterclaim in a lawsuit) alleging that the Work
   or a Contribution incorporated within the Work constitutes direct
   or contributory patent infringement, then any patent licenses
   granted to You under this License for that Work shall terminate
   as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
   Work or Derivative Works thereof in any medium, with or without
   modifications, and in Source or Object form, provided that You
   meet the following conditions:

   (a) You must give any other recipients of the Work or
       Derivative Works a copy of this License; and

   (b) You must cause any modified files to carry prominent notices
       stating that You changed the files; and

   (c) You must retain, in the Source form of any Derivative Works
       that You distribute, all copyright, patent, trademark, and
       attribution notices from the Source form of the Work,
       excluding those notices that do not pertain to any part of
       the Derivative Works; and

   (d) If the Work includes a "NOTICE" text file as part of its
       distribution, then any Derivative Works that You distribute must
       include a readable copy of the attribution notices contained
       within such NOTICE file, excluding those notices that do not
       pertain to any part of the Derivative Works, in at least one
       of the following places: within a NOTICE text file distributed
       as part of the Derivative Works; within the Source form or
       documentation, if provided along with the Derivative Works; or,
       within a display generated by the Derivative Works, if and
       wherever such third-party notices normally appear. The contents
       of the NOTICE file are for informational purposes only and
       do not modify the License. You may add Your own attribution
       notices within Derivative Works that You distribute, alongside
       or as an addendum to the NOTICE text from the Work, provided
       that such additional attribution notices cannot be construed
       as modifying the License.

   You may add Your own copyright statement to Your modifications and
   may provide additional or different license terms and conditions
   for use, reproduction, or distribution of Your modifications, or
   for any such Derivative Works as a whole, provided Your use,
   reproduction, and distribution of the Work otherwise complies with
   the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
   any Contribution intentionally submitted for inclusion in the Work
   by You to the Licensor shall be under the terms and conditions of
   this License, without any additional terms or conditions.
   Notwithstanding the above, nothing herein shall supersede or modify
   the terms of any separate license agreement you may have executed
   with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
   names, trademarks, service marks, or product names of the Licensor,
   except as required for reasonable and customary use in describing the
   origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
   agreed to in writing, Licensor provides the Work (and each
   Contributor provides its Contributions) on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
   implied, including, without limitation, any warranties or conditions
   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
   PARTICULAR PURPOSE. You are solely responsible for determining the
   appropriateness of using or redistributing the Work and assume any
   risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
   whether in tort (including negligence), contract, or otherwise,
   unless required by applicable law (such as deliberate and grossly
   negligent acts) or agreed to in writing, shall any Contributor be
   liable to You for damages, including any direct, indirect, special,
   incidental, or consequential damages of any character arising as a
   result of this License or out of the use or inability to use the
   Work (including but not limited to damages for loss of goodwill,
   work stoppage, computer failure or malfunction, or any and all
   other commercial damages or losses), even if such Contributor
   has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
   the Work or Derivative Works thereof, You may choose to offer,
   and charge a fee for, acceptance of support, warranty, indemnity,
   or other liability obligations and/or rights consistent with this
   License. However, in accepting such obligations, You may act only
   on Your own behalf and on Your sole responsibility, not on behalf
   of any other Contributor, and only if You agree to indemnify,
   defend, and hold each Contributor harmless for any liability
   incurred by, or claims asserted against, such Contributor by reason
   of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

   To apply the Apache License to your work, attach the following
   boilerplate notice, with the fields enclosed by brackets "{}"
   replaced with your own identifying information. (Don't include
   the brackets!) The text should be enclosed in the appropriate
   comment syntax for the file format. We also recommend that a
   file or class name and description of purpose be included on the
   same "printed page" as the copyright notice for easier
   identification within third-party archives.

Copyright 2013-2020 aiohttp maintainers
Copyright aio-libs contributors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -7,9 +7,10 @@ CYS := $(wildcard aiohttp/*.pyx) $(wildcard aiohttp/*.pyi) $(wildcard aiohttp/*
PYXS := $(wildcard aiohttp/*.pyx)
CS := $(wildcard aiohttp/*.c)
PYS := $(wildcard aiohttp/*.py)
REQS := $(wildcard requirements/*.txt)
IN := doc-spelling lint cython dev
ALLS := $(sort $(CYS) $(CS) $(PYS) $(REQS))

.PHONY: all
all: test

@@ -45,9 +46,11 @@ endif
# Enumerate intermediate files to don't remove them automatically.
.SECONDARY: $(call to-hash,$(ALLS))

.update-pip:
    @python -m pip install --upgrade pip

.install-cython: $(call to-hash,requirements/cython.txt)
    pip install -r requirements/cython.txt
.install-cython: .update-pip $(call to-hash,requirements/cython.txt)
    @python -m pip install -r requirements/cython.txt -c requirements/constraints.txt
    @touch .install-cython

aiohttp/_find_header.c: $(call to-hash,aiohttp/hdrs.py ./tools/gen.py)
@@ -57,12 +60,21 @@ aiohttp/_find_header.c: $(call to-hash,aiohttp/hdrs.py ./tools/gen.py)
aiohttp/%.c: aiohttp/%.pyx $(call to-hash,$(CYS)) aiohttp/_find_header.c
    cython -3 -o $@ $< -I aiohttp

vendor/llhttp/node_modules: vendor/llhttp/package.json
    cd vendor/llhttp; npm install

.llhttp-gen: vendor/llhttp/node_modules
    $(MAKE) -C vendor/llhttp generate
    @touch .llhttp-gen

.PHONY: generate-llhttp
generate-llhttp: .llhttp-gen

.PHONY: cythonize
cythonize: .install-cython $(PYXS:.pyx=.c)

.install-deps: .install-cython $(PYXS:.pyx=.c) $(call to-hash,$(CYS) $(REQS))
    pip install -r requirements/dev.txt
    @python -m pip install -r requirements/dev.txt -c requirements/constraints.txt
    @touch .install-deps

.PHONY: lint
@@ -74,10 +86,10 @@ fmt format:

.PHONY: mypy
mypy:
    mypy aiohttp
    mypy

.develop: .install-deps $(call to-hash,$(PYS) $(CYS) $(CS))
    pip install -e .
.develop: .install-deps generate-llhttp $(call to-hash,$(PYS) $(CYS) $(CS))
    python -m pip install -e . -c requirements/constraints.txt
    @touch .develop

.PHONY: test
@@ -92,6 +104,30 @@ vtest: .develop
vvtest: .develop
    @pytest -vv


define run_tests_in_docker
    DOCKER_BUILDKIT=1 docker build --build-arg PYTHON_VERSION=$(1) --build-arg AIOHTTP_NO_EXTENSIONS=$(2) -t "aiohttp-test-$(1)-$(2)" -f tools/testing/Dockerfile .
    docker run --rm -ti -v `pwd`:/src -w /src "aiohttp-test-$(1)-$(2)" $(TEST_SPEC)
endef

.PHONY: test-3.7-no-extensions test-3.7 test-3.8-no-extensions test-3.8 test-3.9-no-extensions test-3.9 test-3.10-no-extensions test-3.10
test-3.7-no-extensions:
    $(call run_tests_in_docker,3.7,y)
test-3.7:
    $(call run_tests_in_docker,3.7,n)
test-3.8-no-extensions:
    $(call run_tests_in_docker,3.8,y)
test-3.8:
    $(call run_tests_in_docker,3.8,n)
test-3.9-no-extensions:
    $(call run_tests_in_docker,3.9,y)
test-3.9:
    $(call run_tests_in_docker,3.9,n)
test-3.10-no-extensions:
    $(call run_tests_in_docker,3.10,y)
test-3.10:
    $(call run_tests_in_docker,3.10,n)

.PHONY: clean
clean:
    @rm -rf `find . -name __pycache__`
@@ -125,20 +161,28 @@ clean:
    @rm -rf aiohttp.egg-info
    @rm -f .install-deps
    @rm -f .install-cython
    @rm -rf vendor/llhttp/node_modules
    @rm -f .llhttp-gen
    @$(MAKE) -C vendor/llhttp clean

.PHONY: doc
doc:
    @make -C docs html SPHINXOPTS="-W --keep-going -E"
    @make -C docs html SPHINXOPTS="-W --keep-going -n -E"
    @echo "open file://`pwd`/docs/_build/html/index.html"

.PHONY: doc-spelling
doc-spelling:
    @make -C docs spelling SPHINXOPTS="-W -E"
    @make -C docs spelling SPHINXOPTS="-W --keep-going -n -E"

.PHONY: compile-deps
compile-deps: .update-pip $(REQS)
    pip-compile --no-header --allow-unsafe -q --strip-extras \
        -o requirements/constraints.txt \
        requirements/constraints.in

.PHONY: install
install:
    @pip install -U 'pip'
    @pip install -Ur requirements/dev.txt
install: .update-pip
    @python -m pip install -r requirements/dev.txt -c requirements/constraints.txt

.PHONY: install-dev
install-dev: .develop
(The diff for this file is not shown because of its size.)
@@ -2,7 +2,7 @@
Async http client/server framework
==================================

.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png
.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg
   :height: 64px
   :width: 64px
   :alt: aiohttp logo

@@ -25,13 +25,13 @@ Async http client/server framework
   :target: https://docs.aiohttp.org/
   :alt: Latest Read The Docs

.. image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group
   :target: https://aio-libs.discourse.group
   :alt: Discourse status
.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
   :target: https://matrix.to/#/%23aio-libs:matrix.org
   :alt: Matrix Room — #aio-libs:matrix.org

.. image:: https://badges.gitter.im/Join%20Chat.svg
   :target: https://gitter.im/aio-libs/Lobby
   :alt: Chat on Gitter
.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
   :target: https://matrix.to/#/%23aio-libs-space:matrix.org
   :alt: Matrix Space — #aio-libs-space:matrix.org


Key Features

@@ -67,8 +67,7 @@ To get something from the web:
       html = await response.text()
       print("Body:", html[:15], "...")

   loop = asyncio.get_event_loop()
   loop.run_until_complete(main())
   asyncio.run(main())

This prints:
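For context, the full quick-start snippet this hunk edits looks roughly like the following (reconstructed for readability; the request URL follows the README's own example):

    import asyncio
    import aiohttp

    async def main():
        async with aiohttp.ClientSession() as session:
            async with session.get("http://python.org") as response:
                html = await response.text()
                print("Body:", html[:15], "...")

    # asyncio.run() replaces the manual get_event_loop()/run_until_complete() boilerplate.
    asyncio.run(main())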
@@ -161,17 +160,19 @@ Requirements
- Python >= 3.6
- async-timeout_
- attrs_
- chardet_
- charset-normalizer_
- multidict_
- yarl_
- frozenlist_

Optionally you may install the cChardet_ and aiodns_ libraries (highly
recommended for sake of speed).

.. _chardet: https://pypi.python.org/pypi/chardet
.. _charset-normalizer: https://pypi.org/project/charset-normalizer
.. _aiodns: https://pypi.python.org/pypi/aiodns
.. _attrs: https://github.com/python-attrs/attrs
.. _multidict: https://pypi.python.org/pypi/multidict
.. _frozenlist: https://pypi.org/project/frozenlist/
.. _yarl: https://pypi.python.org/pypi/yarl
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
.. _cChardet: https://pypi.python.org/pypi/cchardet
(The diff for this file is not shown because of its size.)
@@ -12,8 +12,6 @@ aiohttp/_cparser.pxd
aiohttp/_find_header.c
aiohttp/_find_header.h
aiohttp/_find_header.pxd
aiohttp/_frozenlist.c
aiohttp/_frozenlist.pyx
aiohttp/_headers.pxi
aiohttp/_helpers.c
aiohttp/_helpers.pyi
@@ -34,8 +32,6 @@ aiohttp/client_ws.py
aiohttp/connector.py
aiohttp/cookiejar.py
aiohttp/formdata.py
aiohttp/frozenlist.py
aiohttp/frozenlist.pyi
aiohttp/hdrs.py
aiohttp/helpers.py
aiohttp/http.py
@@ -51,8 +47,6 @@ aiohttp/payload_streamer.py
aiohttp/py.typed
aiohttp/pytest_plugin.py
aiohttp/resolver.py
aiohttp/signals.py
aiohttp/signals.pyi
aiohttp/streams.py
aiohttp/tcp_helpers.py
aiohttp/test_utils.py
@@ -76,19 +70,17 @@ aiohttp/worker.py
aiohttp.egg-info/PKG-INFO
aiohttp.egg-info/SOURCES.txt
aiohttp.egg-info/dependency_links.txt
aiohttp.egg-info/not-zip-safe
aiohttp.egg-info/requires.txt
aiohttp.egg-info/top_level.txt
aiohttp/.hash/_cparser.pxd.hash
aiohttp/.hash/_find_header.pxd.hash
aiohttp/.hash/_frozenlist.pyx.hash
aiohttp/.hash/_helpers.pyi.hash
aiohttp/.hash/_helpers.pyx.hash
aiohttp/.hash/_http_parser.pyx.hash
aiohttp/.hash/_http_writer.pyx.hash
aiohttp/.hash/_websocket.pyx.hash
aiohttp/.hash/frozenlist.pyi.hash
aiohttp/.hash/hdrs.py.hash
aiohttp/.hash/signals.pyi.hash
docs/Makefile
docs/abc.rst
docs/aiohttp-icon.svg
@@ -119,7 +111,6 @@ docs/new_router.rst
docs/old-logo.png
docs/old-logo.svg
docs/powered_by.rst
docs/signals.rst
docs/spelling_wordlist.txt
docs/streams.rst
docs/structures.rst
@@ -135,7 +126,9 @@ docs/web_reference.rst
docs/websocket_utilities.rst
docs/whats_new_1_1.rst
docs/whats_new_3_0.rst
docs/_static/aiohttp-icon-128x128.png
docs/_snippets/cchardet-unmaintained-admonition.rst
docs/_static/css/logo-adjustments.css
examples/__init__.py
examples/background_tasks.py
examples/cli_app.py
examples/client_auth.py
@@ -157,16 +150,16 @@ examples/web_srv_route_deco.py
examples/web_srv_route_table.py
examples/web_ws.py
examples/websocket.html
examples/legacy/crawl.py
examples/legacy/srv.py
examples/legacy/tcp_protocol_parser.py
tests/aiohttp.jpg
tests/aiohttp.png
tests/conftest.py
tests/data.unknown_mime_type
tests/data.zero_bytes
tests/hello.txt.gz
tests/sample.txt
tests/test___all__.py
tests/test_base_protocol.py
tests/test_circular_imports.py
tests/test_classbasedview.py
tests/test_client_connection.py
tests/test_client_exceptions.py
@@ -182,7 +175,6 @@ tests/test_connector.py
tests/test_cookiejar.py
tests/test_flowcontrol_streams.py
tests/test_formdata.py
tests/test_frozenlist.py
tests/test_helpers.py
tests/test_http_exceptions.py
tests/test_http_parser.py
@@ -198,7 +190,6 @@ tests/test_pytest_plugin.py
tests/test_resolver.py
tests/test_route_def.py
tests/test_run_app.py
tests/test_signals.py
tests/test_streams.py
tests/test_tcp_helpers.py
tests/test_test_utils.py
@@ -210,7 +201,6 @@ tests/test_web_exceptions.py
tests/test_web_functional.py
tests/test_web_log.py
tests/test_web_middleware.py
tests/test_web_protocol.py
tests/test_web_request.py
tests/test_web_request_handler.py
tests/test_web_response.py
@@ -225,22 +215,76 @@ tests/test_websocket_handshake.py
tests/test_websocket_parser.py
tests/test_websocket_writer.py
tests/test_worker.py
tests/autobahn/client.py
tests/autobahn/fuzzingclient.json
tests/autobahn/fuzzingserver.json
tests/autobahn/server.py
vendor/http-parser/.git
vendor/http-parser/.gitignore
vendor/http-parser/.mailmap
vendor/http-parser/.travis.yml
vendor/http-parser/AUTHORS
vendor/http-parser/LICENSE-MIT
vendor/http-parser/Makefile
vendor/http-parser/README.md
vendor/http-parser/bench.c
vendor/http-parser/http_parser.c
vendor/http-parser/http_parser.gyp
vendor/http-parser/http_parser.h
vendor/http-parser/test.c
vendor/http-parser/contrib/parsertrace.c
vendor/http-parser/contrib/url_parser.c
tests/autobahn/Dockerfile.aiohttp
tests/autobahn/Dockerfile.autobahn
tests/autobahn/test_autobahn.py
tests/autobahn/client/client.py
tests/autobahn/client/fuzzingserver.json
tests/autobahn/server/fuzzingclient.json
tests/autobahn/server/server.py
vendor/README.rst
vendor/llhttp/.dockerignore
vendor/llhttp/.eslintrc.js
vendor/llhttp/.git
vendor/llhttp/.gitignore
vendor/llhttp/.npmrc
vendor/llhttp/CMakeLists.txt
vendor/llhttp/CNAME
vendor/llhttp/CODE_OF_CONDUCT.md
vendor/llhttp/Dockerfile
vendor/llhttp/LICENSE-MIT
vendor/llhttp/Makefile
vendor/llhttp/README.md
vendor/llhttp/_config.yml
vendor/llhttp/libllhttp.pc.in
vendor/llhttp/package-lock.json
vendor/llhttp/package.json
vendor/llhttp/tsconfig.json
vendor/llhttp/tslint.json
vendor/llhttp/.github/workflows/ci.yaml
vendor/llhttp/bench/index.ts
vendor/llhttp/bin/build_wasm.ts
vendor/llhttp/bin/generate.ts
vendor/llhttp/build/llhttp.h
vendor/llhttp/build/c/llhttp.c
vendor/llhttp/docs/releasing.md
vendor/llhttp/examples/wasm.ts
vendor/llhttp/images/http-loose-none.png
vendor/llhttp/images/http-strict-none.png
vendor/llhttp/src/common.gypi
vendor/llhttp/src/llhttp.gyp
vendor/llhttp/src/llhttp.ts
vendor/llhttp/src/llhttp/c-headers.ts
vendor/llhttp/src/llhttp/constants.ts
vendor/llhttp/src/llhttp/http.ts
vendor/llhttp/src/llhttp/url.ts
vendor/llhttp/src/llhttp/utils.ts
vendor/llhttp/src/native/api.c
vendor/llhttp/src/native/api.h
vendor/llhttp/src/native/http.c
vendor/llhttp/test/md-test.ts
vendor/llhttp/test/url.md
vendor/llhttp/test/fixtures/extra.c
vendor/llhttp/test/fixtures/index.ts
vendor/llhttp/test/fuzzers/fuzz_parser.c
vendor/llhttp/test/request/connection.md
vendor/llhttp/test/request/content-length.md
vendor/llhttp/test/request/finish.md
vendor/llhttp/test/request/invalid.md
vendor/llhttp/test/request/lenient-headers.md
vendor/llhttp/test/request/lenient-version.md
vendor/llhttp/test/request/method.md
vendor/llhttp/test/request/pausing.md
vendor/llhttp/test/request/pipelining.md
vendor/llhttp/test/request/sample.md
vendor/llhttp/test/request/transfer-encoding.md
vendor/llhttp/test/request/uri.md
vendor/llhttp/test/response/connection.md
vendor/llhttp/test/response/content-length.md
vendor/llhttp/test/response/finish.md
vendor/llhttp/test/response/invalid.md
vendor/llhttp/test/response/lenient-version.md
vendor/llhttp/test/response/pausing.md
vendor/llhttp/test/response/pipelining.md
vendor/llhttp/test/response/sample.md
vendor/llhttp/test/response/transfer-encoding.md
@ -0,0 +1 @@
|
|||
|
|
@@ -1,14 +1,21 @@
 attrs>=17.3.0
-chardet<5.0,>=2.0
+charset-normalizer<4.0,>=2.0
 multidict<7.0,>=4.5
-async_timeout<4.0,>=3.0
+async_timeout<5.0,>=4.0.0a3
 yarl<2.0,>=1.0
-typing_extensions>=3.6.5
+frozenlist>=1.1.1
+aiosignal>=1.1.2
 
 [:python_version < "3.7"]
 idna-ssl>=1.0
 
+[:python_version < "3.8"]
+asynctest==0.13.0
+typing_extensions>=3.7.4
+
 [speedups]
 aiodns
-brotlipy
+Brotli
+
+[speedups:python_version < "3.10"]
 cchardet
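The requirements hunk above is the heart of the dependency change: chardet gives way to charset-normalizer, async_timeout moves to the 4.x line, and frozenlist plus aiosignal become hard requirements. A quick runtime sanity check of the new dependency set, not part of the commit; the import names are assumed from the distribution names in requires.txt (for example, the "attrs" distribution exposes the "attr" module):

    # Hedged sketch: confirm the aiohttp 3.8.5 dependency set resolves.
    import importlib

    for module in ("attr", "charset_normalizer", "multidict",
                   "async_timeout", "yarl", "frozenlist", "aiosignal"):
        importlib.import_module(module)  # raises ImportError if missing
        print(module, "OK")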
@@ -1 +1 @@
-b60c37d122fa91049ccf318c94c871d82ba17ff3bc3fc64f8a65426fce7120b7 /home/runner/work/aiohttp/aiohttp/aiohttp/_cparser.pxd
+e6d134d56d5f516ab2b5c3b295d0d440a3bef911f4384d506204018895a1f833 /home/runner/work/aiohttp/aiohttp/aiohttp/_cparser.pxd
@@ -1 +0,0 @@
-043f0b704444c6c59da38ab3bae43ce1ff8bfe91d5ce45103b494400e7b71688 /home/runner/work/aiohttp/aiohttp/aiohttp/_frozenlist.pyx
@@ -1 +1 @@
-f0688fb2e81ea92bf0a17822260d9591a30979101da12a4b873113fc459fb5fa /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx
+43bc2c42b9dbb09c19d0782c7aefd1a656a039b31c57a9fa809f82c2807eeaa9 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx
@@ -1 +1 @@
-4e7b7f7baa5c65954e85a5b7c8db7786a0ec3498081b0a9420f792a803086281 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_writer.pyx
+6881c0a7c838655e646c645d99971efaf5e310bc3633a7c62b226e39d81842ac /home/runner/work/aiohttp/aiohttp/aiohttp/_http_writer.pyx
@@ -1 +0,0 @@
-6d134aa08da3d6ba0f76d81fc7f9ec7836a7bc1a99b1950d1c3aa65ed7e3951a /home/runner/work/aiohttp/aiohttp/aiohttp/frozenlist.pyi
@@ -1 +1 @@
-5ac8c3258003604c8993bfa8357361036337330b722e4849024972ccbb5c95f5 /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py
+a30351c34760a1d7835b2a1b0552e463cf1d2db90da0cdb473313dc66e34a031 /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py
@@ -1 +0,0 @@
-48b4df50f771d7e8385524ea0a7057ca1482974f8a43e674982b04b08bc17d5e /home/runner/work/aiohttp/aiohttp/aiohttp/signals.pyi
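Each `.hash` hunk above swaps (or drops) a pinned SHA-256 digest of a Cython source; the pins for `_frozenlist.pyx`, `frozenlist.pyi`, and `signals.pyi` disappear entirely because aiohttp 3.8 delegates to the external frozenlist and aiosignal packages. A hedged sketch of recomputing such a pin, with the path taken from the hunks and the "<digest> <path>" layout assumed from the lines above:

    # Hedged sketch: recompute a .hash pin in the form seen above.
    import hashlib
    from pathlib import Path

    def hash_line(path: str) -> str:
        digest = hashlib.sha256(Path(path).read_bytes()).hexdigest()
        return f"{digest} {path}"

    print(hash_line("aiohttp/_cparser.pxd"))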
@@ -1,4 +1,4 @@
-__version__ = "3.7.4.post0"
+__version__ = "3.8.5"
 
 from typing import Tuple
 
@@ -38,7 +38,7 @@ from .client import (
)
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
from .formdata import FormData as FormData
from .helpers import BasicAuth as BasicAuth, ChainMapProxy as ChainMapProxy
from .helpers import BasicAuth, ChainMapProxy, ETag
from .http import (
HttpVersion as HttpVersion,
HttpVersion10 as HttpVersion10,
@@ -78,7 +75,6 @@ from .resolver import (
DefaultResolver as DefaultResolver,
ThreadedResolver as ThreadedResolver,
)
from .signals import Signal as Signal
from .streams import (
EMPTY_PAYLOAD as EMPTY_PAYLOAD,
DataQueue as DataQueue,
@@ -147,6 +146,7 @@ __all__: Tuple[str, ...] = (
# helpers
"BasicAuth",
"ChainMapProxy",
"ETag",
# http
"HttpVersion",
"HttpVersion10",
@@ -183,8 +183,7 @@ __all__: Tuple[str, ...] = (
"AsyncResolver",
"DefaultResolver",
"ThreadedResolver",
# signals
"Signal",
# streams
"DataQueue",
"EMPTY_PAYLOAD",
"EofStream",
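Beyond the version bump, the `__init__.py` hunks change the package's public surface: `ETag` is newly re-exported next to `BasicAuth` and `ChainMapProxy`, while the `Signal` re-export goes away. A small check against an installed aiohttp 3.8.5, hedged since the environment and the `ETag(value=...)` constructor are assumptions rather than something this diff shows:

    # Hedged sketch, assuming aiohttp 3.8.5 is importable.
    import aiohttp

    assert aiohttp.__version__ == "3.8.5"
    print(aiohttp.ETag(value="33a64df5"))  # newly exported helper
    assert not hasattr(aiohttp, "Signal")  # Signal re-export removed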
@@ -1,140 +1,190 @@
from libc.stdint cimport uint16_t, uint32_t, uint64_t
from libc.stdint cimport (
int8_t,
int16_t,
int32_t,
int64_t,
uint8_t,
uint16_t,
uint32_t,
uint64_t,
)


cdef extern from "../vendor/http-parser/http_parser.h":
ctypedef int (*http_data_cb) (http_parser*,
const char *at,
size_t length) except -1
cdef extern from "../vendor/llhttp/build/llhttp.h":

ctypedef int (*http_cb) (http_parser*) except -1

struct http_parser:
unsigned int type
unsigned int flags
unsigned int state
unsigned int header_state
unsigned int index

uint32_t nread
struct llhttp__internal_s:
int32_t _index
void* _span_pos0
void* _span_cb0
int32_t error
const char* reason
const char* error_pos
void* data
void* _current
uint64_t content_length
uint8_t type
uint8_t method
uint8_t http_major
uint8_t http_minor
uint8_t header_state
uint8_t lenient_flags
uint8_t upgrade
uint8_t finish
uint16_t flags
uint16_t status_code
void* settings

unsigned short http_major
unsigned short http_minor
unsigned int status_code
unsigned int method
unsigned int http_errno
ctypedef llhttp__internal_s llhttp__internal_t
ctypedef llhttp__internal_t llhttp_t

unsigned int upgrade
ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
ctypedef int (*llhttp_cb)(llhttp_t*) except -1

void *data
struct llhttp_settings_s:
llhttp_cb on_message_begin
llhttp_data_cb on_url
llhttp_data_cb on_status
llhttp_data_cb on_header_field
llhttp_data_cb on_header_value
llhttp_cb on_headers_complete
llhttp_data_cb on_body
llhttp_cb on_message_complete
llhttp_cb on_chunk_header
llhttp_cb on_chunk_complete

struct http_parser_settings:
http_cb on_message_begin
http_data_cb on_url
http_data_cb on_status
http_data_cb on_header_field
http_data_cb on_header_value
http_cb on_headers_complete
http_data_cb on_body
http_cb on_message_complete
http_cb on_chunk_header
http_cb on_chunk_complete
llhttp_cb on_url_complete
llhttp_cb on_status_complete
llhttp_cb on_header_field_complete
llhttp_cb on_header_value_complete

enum http_parser_type:
ctypedef llhttp_settings_s llhttp_settings_t

enum llhttp_errno:
HPE_OK,
HPE_INTERNAL,
HPE_STRICT,
HPE_LF_EXPECTED,
HPE_UNEXPECTED_CONTENT_LENGTH,
HPE_CLOSED_CONNECTION,
HPE_INVALID_METHOD,
HPE_INVALID_URL,
HPE_INVALID_CONSTANT,
HPE_INVALID_VERSION,
HPE_INVALID_HEADER_TOKEN,
HPE_INVALID_CONTENT_LENGTH,
HPE_INVALID_CHUNK_SIZE,
HPE_INVALID_STATUS,
HPE_INVALID_EOF_STATE,
HPE_INVALID_TRANSFER_ENCODING,
HPE_CB_MESSAGE_BEGIN,
HPE_CB_HEADERS_COMPLETE,
HPE_CB_MESSAGE_COMPLETE,
HPE_CB_CHUNK_HEADER,
HPE_CB_CHUNK_COMPLETE,
HPE_PAUSED,
HPE_PAUSED_UPGRADE,
HPE_USER

ctypedef llhttp_errno llhttp_errno_t

enum llhttp_flags:
F_CONNECTION_KEEP_ALIVE,
F_CONNECTION_CLOSE,
F_CONNECTION_UPGRADE,
F_CHUNKED,
F_UPGRADE,
F_CONTENT_LENGTH,
F_SKIPBODY,
F_TRAILING,
F_TRANSFER_ENCODING

enum llhttp_lenient_flags:
LENIENT_HEADERS,
LENIENT_CHUNKED_LENGTH

enum llhttp_type:
HTTP_REQUEST,
HTTP_RESPONSE,
HTTP_BOTH

enum http_errno:
HPE_OK,
HPE_CB_message_begin,
HPE_CB_url,
HPE_CB_header_field,
HPE_CB_header_value,
HPE_CB_headers_complete,
HPE_CB_body,
HPE_CB_message_complete,
HPE_CB_status,
HPE_CB_chunk_header,
HPE_CB_chunk_complete,
HPE_INVALID_EOF_STATE,
HPE_HEADER_OVERFLOW,
HPE_CLOSED_CONNECTION,
HPE_INVALID_VERSION,
HPE_INVALID_STATUS,
HPE_INVALID_METHOD,
HPE_INVALID_URL,
HPE_INVALID_HOST,
HPE_INVALID_PORT,
HPE_INVALID_PATH,
HPE_INVALID_QUERY_STRING,
HPE_INVALID_FRAGMENT,
HPE_LF_EXPECTED,
HPE_INVALID_HEADER_TOKEN,
HPE_INVALID_CONTENT_LENGTH,
HPE_INVALID_CHUNK_SIZE,
HPE_INVALID_CONSTANT,
HPE_INVALID_INTERNAL_STATE,
HPE_STRICT,
HPE_PAUSED,
HPE_UNKNOWN
enum llhttp_finish_t:
HTTP_FINISH_SAFE,
HTTP_FINISH_SAFE_WITH_CB,
HTTP_FINISH_UNSAFE

enum flags:
F_CHUNKED,
F_CONNECTION_KEEP_ALIVE,
F_CONNECTION_CLOSE,
F_CONNECTION_UPGRADE,
F_TRAILING,
F_UPGRADE,
F_SKIPBODY,
F_CONTENTLENGTH
enum llhttp_method:
HTTP_DELETE,
HTTP_GET,
HTTP_HEAD,
HTTP_POST,
HTTP_PUT,
HTTP_CONNECT,
HTTP_OPTIONS,
HTTP_TRACE,
HTTP_COPY,
HTTP_LOCK,
HTTP_MKCOL,
HTTP_MOVE,
HTTP_PROPFIND,
HTTP_PROPPATCH,
HTTP_SEARCH,
HTTP_UNLOCK,
HTTP_BIND,
HTTP_REBIND,
HTTP_UNBIND,
HTTP_ACL,
HTTP_REPORT,
HTTP_MKACTIVITY,
HTTP_CHECKOUT,
HTTP_MERGE,
HTTP_MSEARCH,
HTTP_NOTIFY,
HTTP_SUBSCRIBE,
HTTP_UNSUBSCRIBE,
HTTP_PATCH,
HTTP_PURGE,
HTTP_MKCALENDAR,
HTTP_LINK,
HTTP_UNLINK,
HTTP_SOURCE,
HTTP_PRI,
HTTP_DESCRIBE,
HTTP_ANNOUNCE,
HTTP_SETUP,
HTTP_PLAY,
HTTP_PAUSE,
HTTP_TEARDOWN,
HTTP_GET_PARAMETER,
HTTP_SET_PARAMETER,
HTTP_REDIRECT,
HTTP_RECORD,
HTTP_FLUSH

enum http_method:
DELETE, GET, HEAD, POST, PUT, CONNECT, OPTIONS, TRACE, COPY,
LOCK, MKCOL, MOVE, PROPFIND, PROPPATCH, SEARCH, UNLOCK, BIND,
REBIND, UNBIND, ACL, REPORT, MKACTIVITY, CHECKOUT, MERGE,
MSEARCH, NOTIFY, SUBSCRIBE, UNSUBSCRIBE, PATCH, PURGE, MKCALENDAR,
LINK, UNLINK
ctypedef llhttp_method llhttp_method_t;

void http_parser_init(http_parser *parser, http_parser_type type)
void llhttp_settings_init(llhttp_settings_t* settings)
void llhttp_init(llhttp_t* parser, llhttp_type type,
const llhttp_settings_t* settings)

size_t http_parser_execute(http_parser *parser,
const http_parser_settings *settings,
const char *data,
size_t len)
llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
llhttp_errno_t llhttp_finish(llhttp_t* parser)

int http_should_keep_alive(const http_parser *parser)
int llhttp_message_needs_eof(const llhttp_t* parser)

void http_parser_settings_init(http_parser_settings *settings)
int llhttp_should_keep_alive(const llhttp_t* parser)

const char *http_errno_name(http_errno err)
const char *http_errno_description(http_errno err)
const char *http_method_str(http_method m)
void llhttp_pause(llhttp_t* parser)
void llhttp_resume(llhttp_t* parser)

# URL Parser
void llhttp_resume_after_upgrade(llhttp_t* parser)

enum http_parser_url_fields:
UF_SCHEMA = 0,
UF_HOST = 1,
UF_PORT = 2,
UF_PATH = 3,
UF_QUERY = 4,
UF_FRAGMENT = 5,
UF_USERINFO = 6,
UF_MAX = 7
llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
const char* llhttp_get_error_reason(const llhttp_t* parser)
void llhttp_set_error_reason(llhttp_t* parser, const char* reason)
const char* llhttp_get_error_pos(const llhttp_t* parser)
const char* llhttp_errno_name(llhttp_errno_t err)

struct http_parser_url_field_data:
uint16_t off
uint16_t len
const char* llhttp_method_name(llhttp_method_t method)

struct http_parser_url:
uint16_t field_set
uint16_t port
http_parser_url_field_data[<int>UF_MAX] field_data

void http_parser_url_init(http_parser_url *u)

int http_parser_parse_url(const char *buf,
size_t buflen,
int is_connect,
http_parser_url *u)
void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
void llhttp_set_lenient_chunked_length(llhttp_t* parser, int enabled)
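This hunk replaces the `cdef extern` declarations for Node's retired http-parser with llhttp's: `llhttp_init`/`llhttp_execute` supersede `http_parser_init`/`http_parser_execute`, the callbacks move into `llhttp_settings_s`, and the old URL-parser declarations disappear. Whether a given aiohttp build actually runs on the llhttp-backed extension can be probed like this (hedged sketch; the private module name is taken from the `.hash` paths above):

    # Hedged sketch: aiohttp falls back to a pure-Python HTTP parser when
    # the llhttp-backed C extension was not compiled for this interpreter.
    try:
        import aiohttp._http_parser  # Cython module built against llhttp
    except ImportError:
        print("pure-Python HTTP parser in use")
    else:
        print("llhttp-backed C parser in use")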
@@ -1,14 +1,16 @@
/* Generated by Cython 0.29.21 */
/* Generated by Cython 0.29.32 */

#ifndef PY_SSIZE_T_CLEAN
#define PY_SSIZE_T_CLEAN
#endif /* PY_SSIZE_T_CLEAN */
#include "Python.h"
#ifndef Py_PYTHON_H
#error Python headers needed to compile C extensions, please install development version of Python.
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
#error Cython requires Python 2.6+ or Python 3.3+.
#else
#define CYTHON_ABI "0_29_21"
#define CYTHON_HEX_VERSION 0x001D15F0
#define CYTHON_ABI "0_29_32"
#define CYTHON_HEX_VERSION 0x001D20F0
#define CYTHON_FUTURE_DIVISION 1
#include <stddef.h>
#ifndef offsetof
@@ -47,6 +49,7 @@
#define CYTHON_COMPILING_IN_PYPY 1
#define CYTHON_COMPILING_IN_PYSTON 0
#define CYTHON_COMPILING_IN_CPYTHON 0
#define CYTHON_COMPILING_IN_NOGIL 0
#undef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 0
#undef CYTHON_USE_PYTYPE_LOOKUP
@@ -83,10 +86,14 @@
#define CYTHON_USE_DICT_VERSIONS 0
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
#define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_HEX >= 0x07030900)
#endif
#elif defined(PYSTON_VERSION)
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_PYSTON 1
#define CYTHON_COMPILING_IN_CPYTHON 0
#define CYTHON_COMPILING_IN_NOGIL 0
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
#endif
@@ -124,10 +131,59 @@
#define CYTHON_USE_DICT_VERSIONS 0
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
#define CYTHON_UPDATE_DESCRIPTOR_DOC 0
#endif
#elif defined(PY_NOGIL)
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_PYSTON 0
#define CYTHON_COMPILING_IN_CPYTHON 0
#define CYTHON_COMPILING_IN_NOGIL 1
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
#endif
#undef CYTHON_USE_PYTYPE_LOOKUP
#define CYTHON_USE_PYTYPE_LOOKUP 0
#ifndef CYTHON_USE_ASYNC_SLOTS
#define CYTHON_USE_ASYNC_SLOTS 1
#endif
#undef CYTHON_USE_PYLIST_INTERNALS
#define CYTHON_USE_PYLIST_INTERNALS 0
#ifndef CYTHON_USE_UNICODE_INTERNALS
#define CYTHON_USE_UNICODE_INTERNALS 1
#endif
#undef CYTHON_USE_UNICODE_WRITER
#define CYTHON_USE_UNICODE_WRITER 0
#undef CYTHON_USE_PYLONG_INTERNALS
#define CYTHON_USE_PYLONG_INTERNALS 0
#ifndef CYTHON_AVOID_BORROWED_REFS
#define CYTHON_AVOID_BORROWED_REFS 0
#endif
#ifndef CYTHON_ASSUME_SAFE_MACROS
#define CYTHON_ASSUME_SAFE_MACROS 1
#endif
#ifndef CYTHON_UNPACK_METHODS
#define CYTHON_UNPACK_METHODS 1
#endif
#undef CYTHON_FAST_THREAD_STATE
#define CYTHON_FAST_THREAD_STATE 0
#undef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL 0
#ifndef CYTHON_PEP489_MULTI_PHASE_INIT
#define CYTHON_PEP489_MULTI_PHASE_INIT 1
#endif
#ifndef CYTHON_USE_TP_FINALIZE
#define CYTHON_USE_TP_FINALIZE 1
#endif
#undef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS 0
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#else
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_PYSTON 0
#define CYTHON_COMPILING_IN_CPYTHON 1
#define CYTHON_COMPILING_IN_NOGIL 0
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
#endif
@@ -155,7 +211,7 @@
#ifndef CYTHON_USE_UNICODE_INTERNALS
#define CYTHON_USE_UNICODE_INTERNALS 1
#endif
#if PY_VERSION_HEX < 0x030300F0
#if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2
#undef CYTHON_USE_UNICODE_WRITER
#define CYTHON_USE_UNICODE_WRITER 0
#elif !defined(CYTHON_USE_UNICODE_WRITER)
@@ -170,11 +226,14 @@
#ifndef CYTHON_UNPACK_METHODS
#define CYTHON_UNPACK_METHODS 1
#endif
#ifndef CYTHON_FAST_THREAD_STATE
#if PY_VERSION_HEX >= 0x030B00A4
#undef CYTHON_FAST_THREAD_STATE
#define CYTHON_FAST_THREAD_STATE 0
#elif !defined(CYTHON_FAST_THREAD_STATE)
#define CYTHON_FAST_THREAD_STATE 1
#endif
#ifndef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL 1
#define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030A0000)
#endif
#ifndef CYTHON_PEP489_MULTI_PHASE_INIT
#define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000)
@@ -185,15 +244,23 @@
#ifndef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1)
#endif
#ifndef CYTHON_USE_EXC_INFO_STACK
#if PY_VERSION_HEX >= 0x030B00A4
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#elif !defined(CYTHON_USE_EXC_INFO_STACK)
#define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3)
#endif
#ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
#define CYTHON_UPDATE_DESCRIPTOR_DOC 1
#endif
#endif
#if !defined(CYTHON_FAST_PYCCALL)
#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1)
#endif
#if CYTHON_USE_PYLONG_INTERNALS
#include "longintrepr.h"
#if PY_MAJOR_VERSION < 3
#include "longintrepr.h"
#endif
#undef SHIFT
#undef BASE
#undef MASK
@@ -310,9 +377,68 @@
#define __Pyx_DefaultClassType PyClass_Type
#else
#define __Pyx_BUILTIN_MODULE_NAME "builtins"
#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
#define __Pyx_DefaultClassType PyType_Type
#if PY_VERSION_HEX >= 0x030B00A1
static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int k, int l, int s, int f,
PyObject *code, PyObject *c, PyObject* n, PyObject *v,
PyObject *fv, PyObject *cell, PyObject* fn,
PyObject *name, int fline, PyObject *lnos) {
PyObject *kwds=NULL, *argcount=NULL, *posonlyargcount=NULL, *kwonlyargcount=NULL;
PyObject *nlocals=NULL, *stacksize=NULL, *flags=NULL, *replace=NULL, *call_result=NULL, *empty=NULL;
const char *fn_cstr=NULL;
const char *name_cstr=NULL;
PyCodeObject* co=NULL;
PyObject *type, *value, *traceback;
PyErr_Fetch(&type, &value, &traceback);
if (!(kwds=PyDict_New())) goto end;
if (!(argcount=PyLong_FromLong(a))) goto end;
if (PyDict_SetItemString(kwds, "co_argcount", argcount) != 0) goto end;
if (!(posonlyargcount=PyLong_FromLong(0))) goto end;
if (PyDict_SetItemString(kwds, "co_posonlyargcount", posonlyargcount) != 0) goto end;
if (!(kwonlyargcount=PyLong_FromLong(k))) goto end;
if (PyDict_SetItemString(kwds, "co_kwonlyargcount", kwonlyargcount) != 0) goto end;
if (!(nlocals=PyLong_FromLong(l))) goto end;
if (PyDict_SetItemString(kwds, "co_nlocals", nlocals) != 0) goto end;
if (!(stacksize=PyLong_FromLong(s))) goto end;
if (PyDict_SetItemString(kwds, "co_stacksize", stacksize) != 0) goto end;
if (!(flags=PyLong_FromLong(f))) goto end;
if (PyDict_SetItemString(kwds, "co_flags", flags) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_code", code) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_consts", c) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_names", n) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_varnames", v) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_freevars", fv) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_cellvars", cell) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_linetable", lnos) != 0) goto end;
if (!(fn_cstr=PyUnicode_AsUTF8AndSize(fn, NULL))) goto end;
if (!(name_cstr=PyUnicode_AsUTF8AndSize(name, NULL))) goto end;
if (!(co = PyCode_NewEmpty(fn_cstr, name_cstr, fline))) goto end;
if (!(replace = PyObject_GetAttrString((PyObject*)co, "replace"))) goto cleanup_code_too;
if (!(empty = PyTuple_New(0))) goto cleanup_code_too; // unfortunately __pyx_empty_tuple isn't available here
if (!(call_result = PyObject_Call(replace, empty, kwds))) goto cleanup_code_too;
Py_XDECREF((PyObject*)co);
co = (PyCodeObject*)call_result;
call_result = NULL;
if (0) {
cleanup_code_too:
Py_XDECREF((PyObject*)co);
co = NULL;
}
end:
Py_XDECREF(kwds);
Py_XDECREF(argcount);
Py_XDECREF(posonlyargcount);
Py_XDECREF(kwonlyargcount);
Py_XDECREF(nlocals);
Py_XDECREF(stacksize);
Py_XDECREF(replace);
Py_XDECREF(call_result);
Py_XDECREF(empty);
if (type) {
PyErr_Restore(type, value, traceback);
}
return co;
}
#else
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
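The regenerated `_helpers.c` above comes from Cython 0.29.32, which adds CPython 3.11 support: since `PyCode_New()` is no longer a stable way to build code objects there, the new `__Pyx_PyCode_New` creates an empty code object and fills in the `co_*` fields through its `replace` attribute. The same idea is visible from Python (hedged sketch, standard library only; the file and function names are examples from this diff):

    # Hedged sketch of the 3.11+ strategy above: make an empty code object,
    # then derive the real one via CodeType.replace() instead of calling the
    # version-specific PyCode_New() constructor directly.
    def make_code(filename: str, funcname: str, firstlineno: int):
        empty = compile("", filename, "exec")  # PyCode_NewEmpty analogue
        return empty.replace(co_name=funcname, co_firstlineno=firstlineno)

    print(make_code("aiohttp/_helpers.pyx", "reify", 13))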
@@ -426,8 +552,12 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#endif
#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
#define CYTHON_PEP393_ENABLED 1
#if defined(PyUnicode_IS_READY)
#define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\
0 : _PyUnicode_Ready((PyObject *)(op)))
#else
#define __Pyx_PyUnicode_READY(op) (0)
#endif
#define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
#define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
#define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u)
@@ -436,7 +566,11 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
#define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
#if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE)
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length))
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
#endif
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u))
#endif
@@ -542,10 +676,10 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#if PY_VERSION_HEX < 0x030200A4
typedef long Py_hash_t;
#define __Pyx_PyInt_FromHash_t PyInt_FromLong
#define __Pyx_PyInt_AsHash_t PyInt_AsLong
#define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t
#else
#define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
#define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
#define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func))
@@ -570,8 +704,10 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
} __Pyx_PyAsyncMethodsStruct;
#endif

#if defined(WIN32) || defined(MS_WINDOWS)
#define _USE_MATH_DEFINES
#if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS)
#if !defined(_USE_MATH_DEFINES)
#define _USE_MATH_DEFINES
#endif
#endif
#include <math.h>
#ifdef NAN
@@ -701,6 +837,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x);
(likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj))
static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*);
#if CYTHON_ASSUME_SAFE_MACROS
#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
#else
@@ -1011,13 +1148,21 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args,
#ifndef Py_MEMBER_SIZE
#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member)
#endif
#if CYTHON_FAST_PYCALL
static size_t __pyx_pyframe_localsplus_offset = 0;
#include "frameobject.h"
#if PY_VERSION_HEX >= 0x030b00a6
#ifndef Py_BUILD_CORE
#define Py_BUILD_CORE 1
#endif
#include "internal/pycore_frame.h"
#endif
#define __Pxy_PyFrame_Initialize_Offsets()\
((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\
(void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus)))
#define __Pyx_PyFrame_GetLocalsplus(frame)\
(assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset))
#endif // CYTHON_FAST_PYCALL
#endif

/* PyObjectCall.proto */
@@ -1119,6 +1264,12 @@ static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_ve
static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name);
#endif

/* PySequenceContains.proto */
static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) {
int result = PySequence_Contains(seq, item);
return unlikely(result < 0) ? result : (result == (eq == Py_EQ));
}

/* Import.proto */
static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level);

@@ -1174,12 +1325,17 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
static void __Pyx_AddTraceback(const char *funcname, int c_line,
int py_line, const char *filename);

/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
/* GCCDiagnostics.proto */
#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
#define __Pyx_HAS_GCC_DIAGNOSTIC
#endif

/* CIntFromPy.proto */
static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *);

/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);

/* CIntFromPy.proto */
static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *);

@@ -1244,9 +1400,9 @@ static const char __pyx_k_aiohttp__helpers[] = "aiohttp._helpers";
static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback";
static const char __pyx_k_pyx_unpickle_reify[] = "__pyx_unpickle_reify";
static const char __pyx_k_reified_property_is_read_only[] = "reified property is read-only";
static const char __pyx_k_Incompatible_checksums_s_vs_0x77[] = "Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))";
static const char __pyx_k_Incompatible_checksums_0x_x_vs_0[] = "Incompatible checksums (0x%x vs (0x770cb8f, 0xeecf561, 0x545205d) = (name, wrapped))";
static PyObject *__pyx_n_s_AttributeError;
static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0x77;
static PyObject *__pyx_kp_s_Incompatible_checksums_0x_x_vs_0;
static PyObject *__pyx_n_s_KeyError;
static PyObject *__pyx_n_s_PickleError;
static PyObject *__pyx_n_s_aiohttp__helpers;
@@ -1285,10 +1441,13 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_6__reduce_cython__(struct __
static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_8__setstate_cython__(struct __pyx_obj_7aiohttp_8_helpers_reify *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */
static PyObject *__pyx_pf_7aiohttp_8_helpers___pyx_unpickle_reify(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */
static PyObject *__pyx_tp_new_7aiohttp_8_helpers_reify(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
static PyObject *__pyx_int_88416349;
static PyObject *__pyx_int_124832655;
static PyObject *__pyx_int_250410337;
static PyObject *__pyx_tuple_;
static PyObject *__pyx_tuple__2;
static PyObject *__pyx_codeobj__3;
static PyObject *__pyx_tuple__3;
static PyObject *__pyx_codeobj__4;
/* Late includes */

/* "aiohttp/_helpers.pyx":13
@@ -2259,12 +2418,12 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers___pyx_unpickle_reify(CYTHON_UNUSED
PyObject *__pyx_v___pyx_result = 0;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
PyObject *__pyx_t_1 = NULL;
int __pyx_t_2;
int __pyx_t_3;
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
int __pyx_t_6;
PyObject *__pyx_t_6 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
@@ -2273,114 +2432,118 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers___pyx_unpickle_reify(CYTHON_UNUSED
/* "(tree fragment)":4
 * cdef object __pyx_PickleError
 * cdef object __pyx_result
 * if __pyx_checksum != 0x770cb8f: # <<<<<<<<<<<<<<
 * if __pyx_checksum not in (0x770cb8f, 0xeecf561, 0x545205d): # <<<<<<<<<<<<<<
 * from pickle import PickleError as __pyx_PickleError
 * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum)
 * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x770cb8f, 0xeecf561, 0x545205d) = (name, wrapped))" % __pyx_checksum)
 */
__pyx_t_1 = ((__pyx_v___pyx_checksum != 0x770cb8f) != 0);
if (__pyx_t_1) {
__pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_tuple__2, Py_NE)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(1, 4, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__pyx_t_3 = (__pyx_t_2 != 0);
if (__pyx_t_3) {

/* "(tree fragment)":5
 * cdef object __pyx_result
 * if __pyx_checksum != 0x770cb8f:
 * if __pyx_checksum not in (0x770cb8f, 0xeecf561, 0x545205d):
 * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<<
 * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum)
 * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x770cb8f, 0xeecf561, 0x545205d) = (name, wrapped))" % __pyx_checksum)
 * __pyx_result = reify.__new__(__pyx_type)
 */
__pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_INCREF(__pyx_n_s_PickleError);
__Pyx_GIVEREF(__pyx_n_s_PickleError);
PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError);
__pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_INCREF(__pyx_t_2);
__pyx_v___pyx_PickleError = __pyx_t_2;
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_PickleError);
__pyx_t_4 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_1, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 5, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__pyx_t_1 = __Pyx_ImportFrom(__pyx_t_4, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 5, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_INCREF(__pyx_t_1);
__pyx_v___pyx_PickleError = __pyx_t_1;
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;

/* "(tree fragment)":6
 * if __pyx_checksum != 0x770cb8f:
 * if __pyx_checksum not in (0x770cb8f, 0xeecf561, 0x545205d):
 * from pickle import PickleError as __pyx_PickleError
 * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum) # <<<<<<<<<<<<<<
 * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x770cb8f, 0xeecf561, 0x545205d) = (name, wrapped))" % __pyx_checksum) # <<<<<<<<<<<<<<
 * __pyx_result = reify.__new__(__pyx_type)
 * if __pyx_state is not None:
 */
__pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0x77, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_t_1 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 6, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_INCREF(__pyx_v___pyx_PickleError);
__pyx_t_2 = __pyx_v___pyx_PickleError; __pyx_t_5 = NULL;
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) {
__pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2);
if (likely(__pyx_t_5)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);
__Pyx_INCREF(__pyx_t_5);
__pyx_t_1 = __pyx_v___pyx_PickleError; __pyx_t_6 = NULL;
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {
__pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1);
if (likely(__pyx_t_6)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
__Pyx_INCREF(__pyx_t_6);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_2, function);
__Pyx_DECREF_SET(__pyx_t_1, function);
}
}
__pyx_t_3 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_5, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4);
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
__pyx_t_4 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_5);
__Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_Raise(__pyx_t_4, 0, 0, 0);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 6, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__Pyx_Raise(__pyx_t_3, 0, 0, 0);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__PYX_ERR(1, 6, __pyx_L1_error)

/* "(tree fragment)":4
 * cdef object __pyx_PickleError
 * cdef object __pyx_result
 * if __pyx_checksum != 0x770cb8f: # <<<<<<<<<<<<<<
 * if __pyx_checksum not in (0x770cb8f, 0xeecf561, 0x545205d): # <<<<<<<<<<<<<<
 * from pickle import PickleError as __pyx_PickleError
 * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum)
 * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x770cb8f, 0xeecf561, 0x545205d) = (name, wrapped))" % __pyx_checksum)
 */
}

/* "(tree fragment)":7
 * from pickle import PickleError as __pyx_PickleError
 * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum)
 * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x770cb8f, 0xeecf561, 0x545205d) = (name, wrapped))" % __pyx_checksum)
 * __pyx_result = reify.__new__(__pyx_type) # <<<<<<<<<<<<<<
 * if __pyx_state is not None:
 * __pyx_unpickle_reify__set_state(<reify> __pyx_result, __pyx_state)
 */
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_7aiohttp_8_helpers_reify), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 7, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__pyx_t_4 = NULL;
if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) {
__pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2);
if (likely(__pyx_t_4)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);
__Pyx_INCREF(__pyx_t_4);
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_7aiohttp_8_helpers_reify), __pyx_n_s_new); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 7, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_t_5 = NULL;
if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) {
__pyx_t_5 = PyMethod_GET_SELF(__pyx_t_1);
if (likely(__pyx_t_5)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);
__Pyx_INCREF(__pyx_t_5);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_2, function);
__Pyx_DECREF_SET(__pyx_t_1, function);
}
}
__pyx_t_3 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_4, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type);
__Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_v___pyx_result = __pyx_t_3;
__pyx_t_3 = 0;
__pyx_t_4 = (__pyx_t_5) ? __Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_5, __pyx_v___pyx_type) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v___pyx_type);
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 7, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__pyx_v___pyx_result = __pyx_t_4;
__pyx_t_4 = 0;

/* "(tree fragment)":8
 * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum)
 * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x770cb8f, 0xeecf561, 0x545205d) = (name, wrapped))" % __pyx_checksum)
 * __pyx_result = reify.__new__(__pyx_type)
 * if __pyx_state is not None: # <<<<<<<<<<<<<<
 * __pyx_unpickle_reify__set_state(<reify> __pyx_result, __pyx_state)
 * return __pyx_result
 */
__pyx_t_1 = (__pyx_v___pyx_state != Py_None);
__pyx_t_6 = (__pyx_t_1 != 0);
if (__pyx_t_6) {
__pyx_t_3 = (__pyx_v___pyx_state != Py_None);
__pyx_t_2 = (__pyx_t_3 != 0);
if (__pyx_t_2) {

/* "(tree fragment)":9
 * __pyx_result = reify.__new__(__pyx_type)
@@ -2390,12 +2553,12 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers___pyx_unpickle_reify(CYTHON_UNUSED
 * cdef __pyx_unpickle_reify__set_state(reify __pyx_result, tuple __pyx_state):
 */
if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 9, __pyx_L1_error)
__pyx_t_3 = __pyx_f_7aiohttp_8_helpers___pyx_unpickle_reify__set_state(((struct __pyx_obj_7aiohttp_8_helpers_reify *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 9, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
__pyx_t_4 = __pyx_f_7aiohttp_8_helpers___pyx_unpickle_reify__set_state(((struct __pyx_obj_7aiohttp_8_helpers_reify *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 9, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;

/* "(tree fragment)":8
 * raise __pyx_PickleError("Incompatible checksums (%s vs 0x770cb8f = (name, wrapped))" % __pyx_checksum)
 * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x770cb8f, 0xeecf561, 0x545205d) = (name, wrapped))" % __pyx_checksum)
 * __pyx_result = reify.__new__(__pyx_type)
 * if __pyx_state is not None: # <<<<<<<<<<<<<<
 * __pyx_unpickle_reify__set_state(<reify> __pyx_result, __pyx_state)
@@ -2423,10 +2586,10 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers___pyx_unpickle_reify(CYTHON_UNUSED

/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_4);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_XDECREF(__pyx_t_6);
__Pyx_AddTraceback("aiohttp._helpers.__pyx_unpickle_reify", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
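Cython 0.29.32 also relaxes the pickle-compatibility guard in `__pyx_unpickle_reify`: instead of demanding the single layout checksum `0x770cb8f`, it accepts any of three known-compatible checksums, which is why three `__pyx_int_*` constants were declared earlier and get packed into `__pyx_tuple__2` in `__Pyx_InitCachedConstants` below. In Python terms the generated check amounts to this illustrative sketch, with the constants copied from the hunk:

    # Illustrative sketch of the relaxed guard above; not part of the commit.
    import pickle

    COMPATIBLE = (0x770cb8f, 0xeecf561, 0x545205d)

    def check_checksum(checksum: int) -> None:
        if checksum not in COMPATIBLE:
            raise pickle.PickleError(
                "Incompatible checksums (0x%x vs (0x770cb8f, 0xeecf561, "
                "0x545205d) = (name, wrapped))" % checksum)

    check_checksum(0x770cb8f)  # passes for any historically valid layout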
@@ -2726,12 +2889,15 @@ static PyTypeObject __pyx_type_7aiohttp_8_helpers_reify = {
#if PY_VERSION_HEX >= 0x030400a1
0, /*tp_finalize*/
#endif
#if PY_VERSION_HEX >= 0x030800b1
#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
0, /*tp_vectorcall*/
#endif
#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
0, /*tp_print*/
#endif
#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000
0, /*tp_pypy_flags*/
#endif
};

static PyMethodDef __pyx_methods[] = {
@@ -2781,7 +2947,7 @@ static struct PyModuleDef __pyx_moduledef = {

static __Pyx_StringTabEntry __pyx_string_tab[] = {
{&__pyx_n_s_AttributeError, __pyx_k_AttributeError, sizeof(__pyx_k_AttributeError), 0, 0, 1, 1},
{&__pyx_kp_s_Incompatible_checksums_s_vs_0x77, __pyx_k_Incompatible_checksums_s_vs_0x77, sizeof(__pyx_k_Incompatible_checksums_s_vs_0x77), 0, 0, 1, 0},
{&__pyx_kp_s_Incompatible_checksums_0x_x_vs_0, __pyx_k_Incompatible_checksums_0x_x_vs_0, sizeof(__pyx_k_Incompatible_checksums_0x_x_vs_0), 0, 0, 1, 0},
{&__pyx_n_s_KeyError, __pyx_k_KeyError, sizeof(__pyx_k_KeyError), 0, 0, 1, 1},
{&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1},
{&__pyx_n_s_aiohttp__helpers, __pyx_k_aiohttp__helpers, sizeof(__pyx_k_aiohttp__helpers), 0, 0, 1, 1},
@@ -2835,15 +3001,26 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {
__Pyx_GOTREF(__pyx_tuple_);
__Pyx_GIVEREF(__pyx_tuple_);

/* "(tree fragment)":4
 * cdef object __pyx_PickleError
 * cdef object __pyx_result
 * if __pyx_checksum not in (0x770cb8f, 0xeecf561, 0x545205d): # <<<<<<<<<<<<<<
 * from pickle import PickleError as __pyx_PickleError
 * raise __pyx_PickleError("Incompatible checksums (0x%x vs (0x770cb8f, 0xeecf561, 0x545205d) = (name, wrapped))" % __pyx_checksum)
 */
__pyx_tuple__2 = PyTuple_Pack(3, __pyx_int_124832655, __pyx_int_250410337, __pyx_int_88416349); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 4, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__2);
__Pyx_GIVEREF(__pyx_tuple__2);

/* "(tree fragment)":1
 * def __pyx_unpickle_reify(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<<
 * cdef object __pyx_PickleError
 * cdef object __pyx_result
 */
__pyx_tuple__2 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 1, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__2);
__Pyx_GIVEREF(__pyx_tuple__2);
__pyx_codeobj__3 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__2, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_reify, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__3)) __PYX_ERR(1, 1, __pyx_L1_error)
__pyx_tuple__3 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(1, 1, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__3);
__Pyx_GIVEREF(__pyx_tuple__3);
__pyx_codeobj__4 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__3, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_reify, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__4)) __PYX_ERR(1, 1, __pyx_L1_error)
__Pyx_RefNannyFinishContext();
return 0;
__pyx_L1_error:;
@@ -2853,7 +3030,9 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {

static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) {
if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
__pyx_int_88416349 = PyInt_FromLong(88416349L); if (unlikely(!__pyx_int_88416349)) __PYX_ERR(0, 1, __pyx_L1_error)
__pyx_int_124832655 = PyInt_FromLong(124832655L); if (unlikely(!__pyx_int_124832655)) __PYX_ERR(0, 1, __pyx_L1_error)
__pyx_int_250410337 = PyInt_FromLong(250410337L); if (unlikely(!__pyx_int_250410337)) __PYX_ERR(0, 1, __pyx_L1_error)
return 0;
__pyx_L1_error:;
return -1;
@@ -3084,11 +3263,9 @@ if (!__Pyx_RefNanny) {
#endif
/*--- Library function declarations ---*/
/*--- Threads initialization code ---*/
#if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
#ifdef WITH_THREAD /* Python build with threading support? */
#if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
PyEval_InitThreads();
#endif
#endif
/*--- Module creation code ---*/
#if CYTHON_PEP489_MULTI_PHASE_INIT
__pyx_m = __pyx_pyinit_module;
@@ -3790,7 +3967,7 @@ done:
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) {
PyObject *result;
ternaryfunc call = func->ob_type->tp_call;
ternaryfunc call = Py_TYPE(func)->tp_call;
if (unlikely(!call))
return PyObject_Call(func, arg, kw);
if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
@@ -3877,7 +4054,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObjec
if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) {
return __Pyx_PyObject_CallMethO(func, arg);
#if CYTHON_FAST_PYCCALL
} else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) {
} else if (__Pyx_PyFastCFunction_Check(func)) {
return __Pyx_PyCFunction_FastCall(func, &arg, 1);
#endif
}
@@ -4356,17 +4533,35 @@ static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) {
static int __Pyx_setup_reduce(PyObject* type_obj) {
int ret = 0;
PyObject *object_reduce = NULL;
PyObject *object_getstate = NULL;
PyObject *object_reduce_ex = NULL;
PyObject *reduce = NULL;
PyObject *reduce_ex = NULL;
PyObject *reduce_cython = NULL;
PyObject *setstate = NULL;
PyObject *setstate_cython = NULL;
PyObject *getstate = NULL;
#if CYTHON_USE_PYTYPE_LOOKUP
if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
getstate = _PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate);
#else
if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
getstate = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_getstate);
if (!getstate && PyErr_Occurred()) {
goto __PYX_BAD;
}
#endif
if (getstate) {
#if CYTHON_USE_PYTYPE_LOOKUP
object_getstate = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_getstate);
#else
object_getstate = __Pyx_PyObject_GetAttrStrNoError((PyObject*)&PyBaseObject_Type, __pyx_n_s_getstate);
if (!object_getstate && PyErr_Occurred()) {
goto __PYX_BAD;
}
#endif
if (object_getstate != getstate) {
goto __PYX_GOOD;
}
}
#if CYTHON_USE_PYTYPE_LOOKUP
object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
#else
@@ -4411,6 +4606,8 @@ __PYX_GOOD:
#if !CYTHON_USE_PYTYPE_LOOKUP
Py_XDECREF(object_reduce);
Py_XDECREF(object_reduce_ex);
Py_XDECREF(object_getstate);
Py_XDECREF(getstate);
#endif
Py_XDECREF(reduce);
Py_XDECREF(reduce_ex);
@@ -4452,7 +4649,7 @@ static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int
}
if (!use_cline) {
c_line = 0;
PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False);
(void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False);
}
else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) {
c_line = 0;
@@ -4546,33 +4743,40 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
#include "compile.h"
#include "frameobject.h"
#include "traceback.h"
#if PY_VERSION_HEX >= 0x030b00a6
#ifndef Py_BUILD_CORE
#define Py_BUILD_CORE 1
#endif
#include "internal/pycore_frame.h"
#endif
static PyCodeObject* __Pyx_CreateCodeObjectForTraceback(
const char *funcname, int c_line,
int py_line, const char *filename) {
PyCodeObject *py_code = 0;
PyObject *py_srcfile = 0;
PyObject *py_funcname = 0;
PyCodeObject *py_code = NULL;
PyObject *py_funcname = NULL;
#if PY_MAJOR_VERSION < 3
PyObject *py_srcfile = NULL;
py_srcfile = PyString_FromString(filename);
#else
py_srcfile = PyUnicode_FromString(filename);
#endif
if (!py_srcfile) goto bad;
#endif
if (c_line) {
#if PY_MAJOR_VERSION < 3
py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
if (!py_funcname) goto bad;
#else
py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
if (!py_funcname) goto bad;
funcname = PyUnicode_AsUTF8(py_funcname);
if (!funcname) goto bad;
#endif
}
else {
#if PY_MAJOR_VERSION < 3
py_funcname = PyString_FromString(funcname);
#else
py_funcname = PyUnicode_FromString(funcname);
if (!py_funcname) goto bad;
#endif
}
if (!py_funcname) goto bad;
#if PY_MAJOR_VERSION < 3
py_code = __Pyx_PyCode_New(
0,
0,
@@ -4591,11 +4795,16 @@ static PyCodeObject* __Pyx_CreateCodeObjectForTraceback(
__pyx_empty_bytes /*PyObject *lnotab*/
);
Py_DECREF(py_srcfile);
Py_DECREF(py_funcname);
#else
py_code = PyCode_NewEmpty(filename, funcname, py_line);
#endif
Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline
return py_code;
bad:
Py_XDECREF(py_srcfile);
Py_XDECREF(py_funcname);
#if PY_MAJOR_VERSION < 3
Py_XDECREF(py_srcfile);
#endif
return NULL;
}
static void __Pyx_AddTraceback(const char *funcname, int c_line,
@@ -4603,14 +4812,24 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line,
PyCodeObject *py_code = 0;
PyFrameObject *py_frame = 0;
PyThreadState *tstate = __Pyx_PyThreadState_Current;
PyObject *ptype, *pvalue, *ptraceback;
if (c_line) {
c_line = __Pyx_CLineForTraceback(tstate, c_line);
}
py_code = __pyx_find_code_object(c_line ? -c_line : py_line);
if (!py_code) {
__Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback);
py_code = __Pyx_CreateCodeObjectForTraceback(
funcname, c_line, py_line, filename);
if (!py_code) goto bad;
if (!py_code) {
/* If the code object creation fails, then we should clear the
fetched exception references and propagate the new exception */
Py_XDECREF(ptype);
Py_XDECREF(pvalue);
Py_XDECREF(ptraceback);
goto bad;
}
__Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback);
__pyx_insert_code_object(c_line ? -c_line : py_line, py_code);
}
py_frame = PyFrame_New(
@@ -4649,40 +4868,16 @@ bad:
return (target_type) value;\
}

/* CIntToPy */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0;
const int is_unsigned = neg_one > const_zero;
if (is_unsigned) {
if (sizeof(long) < sizeof(long)) {
return PyInt_FromLong((long) value);
} else if (sizeof(long) <= sizeof(unsigned long)) {
return PyLong_FromUnsignedLong((unsigned long) value);
#ifdef HAVE_LONG_LONG
} else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
#endif
}
} else {
if (sizeof(long) <= sizeof(long)) {
return PyInt_FromLong((long) value);
#ifdef HAVE_LONG_LONG
} else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
return PyLong_FromLongLong((PY_LONG_LONG) value);
#endif
}
}
{
int one = 1; int little = (int)*(unsigned char *)&one;
unsigned char *bytes = (unsigned char *)&value;
return _PyLong_FromByteArray(bytes, sizeof(long),
little, !is_unsigned);
}
}

/* CIntFromPy */
static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) {
const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const long neg_one = (long) -1, const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_Check(x))) {
@@ -4869,9 +5064,54 @@ raise_neg_overflow:
return (long) -1;
}

/* CIntToPy */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const long neg_one = (long) -1, const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
if (is_unsigned) {
if (sizeof(long) < sizeof(long)) {
return PyInt_FromLong((long) value);
} else if (sizeof(long) <= sizeof(unsigned long)) {
return PyLong_FromUnsignedLong((unsigned long) value);
#ifdef HAVE_LONG_LONG
} else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
#endif
}
} else {
if (sizeof(long) <= sizeof(long)) {
return PyInt_FromLong((long) value);
#ifdef HAVE_LONG_LONG
} else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
return PyLong_FromLongLong((PY_LONG_LONG) value);
#endif
}
}
{
int one = 1; int little = (int)*(unsigned char *)&one;
unsigned char *bytes = (unsigned char *)&value;
return _PyLong_FromByteArray(bytes, sizeof(long),
little, !is_unsigned);
}
}

/* CIntFromPy */
static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) {
const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const int neg_one = (int) -1, const_zero = (int) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_Check(x))) {
@@ -5160,11 +5400,33 @@ static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObj

/* CheckBinaryVersion */
static int __Pyx_check_binary_version(void) {
char ctversion[4], rtversion[4];
PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION);
PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion());
|
||||
if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) {
|
||||
char ctversion[5];
|
||||
int same=1, i, found_dot;
|
||||
const char* rt_from_call = Py_GetVersion();
|
||||
PyOS_snprintf(ctversion, 5, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION);
|
||||
found_dot = 0;
|
||||
for (i = 0; i < 4; i++) {
|
||||
if (!ctversion[i]) {
|
||||
same = (rt_from_call[i] < '0' || rt_from_call[i] > '9');
|
||||
break;
|
||||
}
|
||||
if (rt_from_call[i] != ctversion[i]) {
|
||||
same = 0;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!same) {
|
||||
char rtversion[5] = {'\0'};
|
||||
char message[200];
|
||||
for (i=0; i<4; ++i) {
|
||||
if (rt_from_call[i] == '.') {
|
||||
if (found_dot) break;
|
||||
found_dot = 1;
|
||||
} else if (rt_from_call[i] < '0' || rt_from_call[i] > '9') {
|
||||
break;
|
||||
}
|
||||
rtversion[i] = rt_from_call[i];
|
||||
}
|
||||
PyOS_snprintf(message, sizeof(message),
|
||||
"compiletime version %s of module '%.100s' "
|
||||
"does not match runtime version %s",
|
||||
|
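The rewritten __Pyx_check_binary_version above fixes a truncation bug: the old code forced both version strings into four-byte buffers, so a two-digit minor version such as Python 3.10 could not be told apart from 3.1. A minimal Python sketch of the new prefix comparison (the helper name and sample strings are ours, not Cython's):

def versions_match(compiletime: str, runtime: str) -> bool:
    # Compare up to four chars of "major.minor"; if the compile-time string
    # ends first, the runtime version must not continue with another digit.
    for i in range(4):
        if i >= len(compiletime):
            return i >= len(runtime) or not runtime[i].isdigit()
        if i >= len(runtime) or runtime[i] != compiletime[i]:
            return False
    return True

print(versions_match("3.9", "3.9.18 (main)"))   # True
print(versions_match("3.1", "3.10.12 (main)"))  # False; the old 4-byte compare said True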
@@ -5422,6 +5684,23 @@ static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {
Py_DECREF(x);
return ival;
}
static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) {
if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) {
return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o);
#if PY_MAJOR_VERSION < 3
} else if (likely(PyInt_CheckExact(o))) {
return PyInt_AS_LONG(o);
#endif
} else {
Py_ssize_t ival;
PyObject *x;
x = PyNumber_Index(o);
if (!x) return -1;
ival = PyInt_AsLong(x);
Py_DECREF(x);
return ival;
}
}
static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) {
return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False);
}
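The new __Pyx_PyIndex_AsHash_t helper above converts through the index protocol (PyNumber_Index) instead of assuming a plain int. The Python-level face of that protocol, for illustration only (the class is hypothetical):

import operator

class PageNumber:
    # Objects exposing __index__ are accepted wherever PyNumber_Index
    # (and therefore the new hash helper) is applied.
    def __init__(self, n: int) -> None:
        self.n = n

    def __index__(self) -> int:
        return self.n

print(operator.index(PageNumber(42)))  # 42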
(Diff for this file not shown because of its large size.)
@@ -80,13 +80,13 @@ cdef inline object extend(object buf, const char* at, size_t length):
memcpy(ptr + s, at, length)


DEF METHODS_COUNT = 34;
DEF METHODS_COUNT = 46;

cdef list _http_method = []

for i in range(METHODS_COUNT):
_http_method.append(
cparser.http_method_str(<cparser.http_method> i).decode('ascii'))
cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))


cdef inline str http_method_str(int i):
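The table above maps llhttp's numeric method ids to names; http-parser knew 34 methods, llhttp 46. For orientation, a rough driver for the request parser this module accelerates (a sketch: the mocked protocol and the 2 ** 16 buffer limit are stand-ins, and the pure-Python aiohttp.http_parser.HttpRequestParser is used so the snippet runs without the C extension):

import asyncio
from unittest import mock

from aiohttp.http_parser import HttpRequestParser

async def main() -> None:
    loop = asyncio.get_running_loop()
    protocol = mock.Mock()  # stands in for the real connection protocol
    parser = HttpRequestParser(protocol, loop, 2 ** 16)
    messages, upgraded, tail = parser.feed_data(
        b"GET /path?q=1 HTTP/1.1\r\nHost: example.com\r\n\r\n"
    )
    msg, _payload = messages[0]
    print(msg.method, msg.path, upgraded)  # GET /path?q=1 False

asyncio.run(main())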
@@ -272,8 +272,8 @@ cdef _new_response_message(object version,
cdef class HttpParser:

cdef:
cparser.http_parser* _cparser
cparser.http_parser_settings* _csettings
cparser.llhttp_t* _cparser
cparser.llhttp_settings_t* _csettings

bytearray _raw_name
bytearray _raw_value
@@ -310,13 +310,13 @@ cdef class HttpParser:
Py_buffer py_buf

def __cinit__(self):
self._cparser = <cparser.http_parser*> \
PyMem_Malloc(sizeof(cparser.http_parser))
self._cparser = <cparser.llhttp_t*> \
PyMem_Malloc(sizeof(cparser.llhttp_t))
if self._cparser is NULL:
raise MemoryError()

self._csettings = <cparser.http_parser_settings*> \
PyMem_Malloc(sizeof(cparser.http_parser_settings))
self._csettings = <cparser.llhttp_settings_t*> \
PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
if self._csettings is NULL:
raise MemoryError()

@@ -324,19 +324,20 @@ cdef class HttpParser:
PyMem_Free(self._cparser)
PyMem_Free(self._csettings)

cdef _init(self, cparser.http_parser_type mode,
object protocol, object loop, int limit,
object timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True):
cparser.http_parser_init(self._cparser, mode)
cdef _init(
self, cparser.llhttp_type mode,
object protocol, object loop, int limit,
object timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True,
):
cparser.llhttp_settings_init(self._csettings)
cparser.llhttp_init(self._cparser, mode, self._csettings)
self._cparser.data = <void*>self
self._cparser.content_length = 0

cparser.http_parser_settings_init(self._csettings)

self._protocol = protocol
self._loop = loop
self._timer = timer
@@ -417,14 +418,14 @@ cdef class HttpParser:
self._process_header()

method = http_method_str(self._cparser.method)
should_close = not cparser.http_should_keep_alive(self._cparser)
should_close = not cparser.llhttp_should_keep_alive(self._cparser)
upgrade = self._cparser.upgrade
chunked = self._cparser.flags & cparser.F_CHUNKED

raw_headers = tuple(self._raw_headers)
headers = CIMultiDictProxy(self._headers)

if upgrade or self._cparser.method == 5: # cparser.CONNECT:
if upgrade or self._cparser.method == cparser.HTTP_CONNECT:
self._upgraded = True

# do not support old websocket spec
@@ -450,11 +451,12 @@ cdef class HttpParser:
headers, raw_headers, should_close, encoding,
upgrade, chunked)

if (ULLONG_MAX > self._cparser.content_length > 0 or chunked or
self._cparser.method == 5 or # CONNECT: 5
(self._cparser.status_code >= 199 and
self._cparser.content_length == ULLONG_MAX and
self._read_until_eof)
if (
ULLONG_MAX > self._cparser.content_length > 0 or chunked or
self._cparser.method == cparser.HTTP_CONNECT or
(self._cparser.status_code >= 199 and
self._cparser.content_length == 0 and
self._read_until_eof)
):
payload = StreamReader(
self._protocol, timer=self._timer, loop=self._loop,
@@ -485,7 +487,7 @@ cdef class HttpParser:
pass

cdef inline http_version(self):
cdef cparser.http_parser* parser = self._cparser
cdef cparser.llhttp_t* parser = self._cparser

if parser.http_major == 1:
if parser.http_minor == 0:
@@ -504,12 +506,11 @@ cdef class HttpParser:
if self._cparser.flags & cparser.F_CHUNKED:
raise TransferEncodingError(
"Not enough data for satisfy transfer length header.")
elif self._cparser.flags & cparser.F_CONTENTLENGTH:
elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
raise ContentLengthError(
"Not enough data for satisfy content length header.")
elif self._cparser.http_errno != cparser.HPE_OK:
desc = cparser.http_errno_description(
<cparser.http_errno> self._cparser.http_errno)
elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
desc = cparser.llhttp_get_error_reason(self._cparser)
raise PayloadEncodingError(desc.decode('latin-1'))
else:
self._payload.feed_eof()
@@ -522,26 +523,36 @@ cdef class HttpParser:
cdef:
size_t data_len
size_t nb
cdef cparser.llhttp_errno_t errno

PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
data_len = <size_t>self.py_buf.len

nb = cparser.http_parser_execute(
errno = cparser.llhttp_execute(
self._cparser,
self._csettings,
<char*>self.py_buf.buf,
data_len)

if errno is cparser.HPE_PAUSED_UPGRADE:
cparser.llhttp_resume_after_upgrade(self._cparser)

nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf

PyBuffer_Release(&self.py_buf)

if (self._cparser.http_errno != cparser.HPE_OK):
if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
if self._payload_error == 0:
if self._last_error is not None:
ex = self._last_error
self._last_error = None
else:
ex = parser_error_from_errno(
<cparser.http_errno> self._cparser.http_errno)
after = cparser.llhttp_get_error_pos(self._cparser)
before = data[:after - <char*>self.py_buf.buf]
after_b = after.split(b"\n", 1)[0]
before = before.rsplit(b"\n", 1)[-1]
data = before + after_b
pointer = " " * (len(repr(before))-1) + "^"
ex = parser_error_from_errno(self._cparser, data, pointer)
self._payload = None
raise ex
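Note the reworked error path above: instead of mapping an errno to a bare description, the parser now slices the input around llhttp_get_error_pos and draws a caret under the offending byte. A plain-Python sketch of that slicing (the helper name is ours):

def error_context(data: bytes, pos: int) -> str:
    # Keep only the line containing the error and point at the position.
    before = data[:pos].rsplit(b"\n", 1)[-1]
    after = data[pos:].split(b"\n", 1)[0]
    pointer = " " * (len(repr(before)) - 1) + "^"
    return "  {!r}\n  {}".format(before + after, pointer)

print(error_context(b"GET / HTTP/1.1\r\nBad Header!!\r\n\r\n", 26))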
@@ -562,39 +573,76 @@ cdef class HttpParser:

cdef class HttpRequestParser(HttpParser):

def __init__(self, protocol, loop, int limit, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
def __init__(
self, protocol, loop, int limit, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True,
):
self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body, read_until_eof)
self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body, read_until_eof,
auto_decompress)

cdef object _on_status_complete(self):
cdef Py_buffer py_buf
if not self._buf:
return
self._path = self._buf.decode('utf-8', 'surrogateescape')
if self._cparser.method == 5: # CONNECT
self._url = URL(self._path)
else:
PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE)
try:
self._url = _parse_url(<char*>py_buf.buf,
py_buf.len)
finally:
PyBuffer_Release(&py_buf)
PyByteArray_Resize(self._buf, 0)
cdef int idx1, idx2
if not self._buf:
return
self._path = self._buf.decode('utf-8', 'surrogateescape')
try:
idx3 = len(self._path)
if self._cparser.method == cparser.HTTP_CONNECT:
# authority-form,
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
self._url = URL.build(authority=self._path, encoded=True)
elif idx3 > 1 and self._path[0] == '/':
# origin-form,
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
idx1 = self._path.find("?")
if idx1 == -1:
query = ""
idx2 = self._path.find("#")
if idx2 == -1:
path = self._path
fragment = ""
else:
path = self._path[0: idx2]
fragment = self._path[idx2+1:]

else:
path = self._path[0:idx1]
idx1 += 1
idx2 = self._path.find("#", idx1+1)
if idx2 == -1:
query = self._path[idx1:]
fragment = ""
else:
query = self._path[idx1: idx2]
fragment = self._path[idx2+1:]

self._url = URL.build(
path=path,
query_string=query,
fragment=fragment,
encoded=True,
)
else:
# absolute-form for proxy maybe,
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
self._url = URL(self._path, encoded=True)
finally:
PyByteArray_Resize(self._buf, 0)

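The rewritten _on_status_complete above distinguishes the three RFC 7230 request-target forms (authority-form for CONNECT, origin-form, absolute-form) and always builds the URL with encoded=True so yarl does not re-quote what the peer sent. A plain-Python rendering of the origin-form branch (the helper name is ours; yarl is the URL library this module already uses):

from yarl import URL

def split_origin_form(target: str) -> URL:
    path, query, fragment = target, "", ""
    i = target.find("?")
    j = target.find("#")
    if i != -1:  # query present
        path = target[:i]
        if j > i:  # fragment after the query
            query, fragment = target[i + 1 : j], target[j + 1 :]
        else:
            query = target[i + 1 :]
    elif j != -1:  # fragment only
        path, fragment = target[:j], target[j + 1 :]
    return URL.build(path=path, query_string=query, fragment=fragment, encoded=True)

print(split_origin_form("/search?q=llhttp#results"))  # /search?q=llhttp#results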
cdef class HttpResponseParser(HttpParser):

def __init__(self, protocol, loop, int limit, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True
def __init__(
self, protocol, loop, int limit, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True
):
self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
max_line_size, max_headers, max_field_size,
@@ -608,7 +656,7 @@ cdef class HttpResponseParser(HttpParser):
else:
self._reason = self._reason or ''

cdef int cb_on_message_begin(cparser.http_parser* parser) except -1:
cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data

pyparser._started = True
@@ -620,7 +668,7 @@ cdef int cb_on_message_begin(cparser.http_parser* parser) except -1:
return 0


cdef int cb_on_url(cparser.http_parser* parser,
cdef int cb_on_url(cparser.llhttp_t* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
@@ -635,7 +683,7 @@ cdef int cb_on_url(cparser.http_parser* parser,
return 0


cdef int cb_on_status(cparser.http_parser* parser,
cdef int cb_on_status(cparser.llhttp_t* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef str reason
@@ -651,7 +699,7 @@ cdef int cb_on_status(cparser.http_parser* parser,
return 0


cdef int cb_on_header_field(cparser.http_parser* parser,
cdef int cb_on_header_field(cparser.llhttp_t* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef Py_ssize_t size
@@ -669,7 +717,7 @@ cdef int cb_on_header_field(cparser.http_parser* parser,
return 0


cdef int cb_on_header_value(cparser.http_parser* parser,
cdef int cb_on_header_value(cparser.llhttp_t* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef Py_ssize_t size
@@ -686,7 +734,7 @@ cdef int cb_on_header_value(cparser.http_parser* parser,
return 0


cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1:
cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_status_complete()
@@ -695,13 +743,16 @@ cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1:
pyparser._last_error = exc
return -1
else:
if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT
if (
pyparser._cparser.upgrade or
pyparser._cparser.method == cparser.HTTP_CONNECT
):
return 2
else:
return 0


cdef int cb_on_body(cparser.http_parser* parser,
cdef int cb_on_body(cparser.llhttp_t* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef bytes body = at[:length]
@@ -718,7 +769,7 @@ cdef int cb_on_body(cparser.http_parser* parser,
return 0


cdef int cb_on_message_complete(cparser.http_parser* parser) except -1:
cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._started = False
@@ -730,7 +781,7 @@ cdef int cb_on_message_complete(cparser.http_parser* parser) except -1:
return 0


cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1:
cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_chunk_header()
@@ -741,7 +792,7 @@ cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1:
return 0


cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1:
cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_chunk_complete()
@@ -752,19 +803,21 @@ cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1:
return 0


cdef parser_error_from_errno(cparser.http_errno errno):
cdef bytes desc = cparser.http_errno_description(errno)
cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
cdef bytes desc = cparser.llhttp_get_error_reason(parser)

if errno in (cparser.HPE_CB_message_begin,
cparser.HPE_CB_url,
cparser.HPE_CB_header_field,
cparser.HPE_CB_header_value,
cparser.HPE_CB_headers_complete,
cparser.HPE_CB_body,
cparser.HPE_CB_message_complete,
cparser.HPE_CB_status,
cparser.HPE_CB_chunk_header,
cparser.HPE_CB_chunk_complete):
if errno in (cparser.HPE_CB_MESSAGE_BEGIN,
cparser.HPE_CB_HEADERS_COMPLETE,
cparser.HPE_CB_MESSAGE_COMPLETE,
cparser.HPE_CB_CHUNK_HEADER,
cparser.HPE_CB_CHUNK_COMPLETE,
cparser.HPE_INVALID_CONSTANT,
cparser.HPE_INVALID_HEADER_TOKEN,
cparser.HPE_INVALID_CONTENT_LENGTH,
cparser.HPE_INVALID_CHUNK_SIZE,
cparser.HPE_INVALID_EOF_STATE,
cparser.HPE_INVALID_TRANSFER_ENCODING):
cls = BadHttpMessage

elif errno == cparser.HPE_INVALID_STATUS:
@@ -773,103 +826,13 @@ cdef parser_error_from_errno(cparser.http_errno errno):
elif errno == cparser.HPE_INVALID_METHOD:
cls = BadStatusLine

elif errno == cparser.HPE_INVALID_VERSION:
cls = BadStatusLine

elif errno == cparser.HPE_INVALID_URL:
cls = InvalidURLError

else:
cls = BadHttpMessage

return cls(desc.decode('latin-1'))


def parse_url(url):
cdef:
Py_buffer py_buf
char* buf_data

PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE)
try:
buf_data = <char*>py_buf.buf
return _parse_url(buf_data, py_buf.len)
finally:
PyBuffer_Release(&py_buf)


cdef _parse_url(char* buf_data, size_t length):
cdef:
cparser.http_parser_url* parsed
int res
str schema = None
str host = None
object port = None
str path = None
str query = None
str fragment = None
str user = None
str password = None
str userinfo = None
object result = None
int off
int ln

parsed = <cparser.http_parser_url*> \
PyMem_Malloc(sizeof(cparser.http_parser_url))
if parsed is NULL:
raise MemoryError()
cparser.http_parser_url_init(parsed)
try:
res = cparser.http_parser_parse_url(buf_data, length, 0, parsed)

if res == 0:
if parsed.field_set & (1 << cparser.UF_SCHEMA):
off = parsed.field_data[<int>cparser.UF_SCHEMA].off
ln = parsed.field_data[<int>cparser.UF_SCHEMA].len
schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
schema = ''

if parsed.field_set & (1 << cparser.UF_HOST):
off = parsed.field_data[<int>cparser.UF_HOST].off
ln = parsed.field_data[<int>cparser.UF_HOST].len
host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
host = ''

if parsed.field_set & (1 << cparser.UF_PORT):
port = parsed.port

if parsed.field_set & (1 << cparser.UF_PATH):
off = parsed.field_data[<int>cparser.UF_PATH].off
ln = parsed.field_data[<int>cparser.UF_PATH].len
path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
path = ''

if parsed.field_set & (1 << cparser.UF_QUERY):
off = parsed.field_data[<int>cparser.UF_QUERY].off
ln = parsed.field_data[<int>cparser.UF_QUERY].len
query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
query = ''

if parsed.field_set & (1 << cparser.UF_FRAGMENT):
off = parsed.field_data[<int>cparser.UF_FRAGMENT].off
ln = parsed.field_data[<int>cparser.UF_FRAGMENT].len
fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
fragment = ''

if parsed.field_set & (1 << cparser.UF_USERINFO):
off = parsed.field_data[<int>cparser.UF_USERINFO].off
ln = parsed.field_data[<int>cparser.UF_USERINFO].len
userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')

user, sep, password = userinfo.partition(':')

return URL_build(scheme=schema,
user=user, password=password, host=host, port=port,
path=path, query_string=query, fragment=fragment, encoded=True)
else:
raise InvalidURLError("invalid url {!r}".format(buf_data))
finally:
PyMem_Free(parsed)
return cls("{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer))
(Diff for this file not shown because of its large size.)
@@ -111,6 +111,14 @@ cdef str to_str(object s):
return str(s)


cdef void _safe_header(str string) except *:
if "\r" in string or "\n" in string:
raise ValueError(
"Newline or carriage return character detected in HTTP status message or "
"header. This is a potential security issue."
)


def _serialize_headers(str status_line, headers):
cdef Writer writer
cdef object key
@@ -119,6 +127,10 @@ def _serialize_headers(str status_line, headers):

_init_writer(&writer)

for key, val in headers.items():
_safe_header(to_str(key))
_safe_header(to_str(val))

try:
if _write_str(&writer, status_line) < 0:
raise
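_safe_header above closes a response-splitting hole: any CR or LF that survives into a serialized header name, header value, or status line now fails fast instead of being written to the wire. The same guard in plain Python, for illustration:

def safe_header(value: str) -> str:
    if "\r" in value or "\n" in value:
        raise ValueError(
            "Newline or carriage return character detected in HTTP "
            "status message or header. This is a potential security issue."
        )
    return value

safe_header("text/html; charset=utf-8")  # passes through unchanged
try:
    safe_header("ok\r\nX-Injected: 1")
except ValueError as exc:
    print(exc)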
@@ -1,14 +1,16 @@
/* Generated by Cython 0.29.21 */
/* Generated by Cython 0.29.32 */

#ifndef PY_SSIZE_T_CLEAN
#define PY_SSIZE_T_CLEAN
#endif /* PY_SSIZE_T_CLEAN */
#include "Python.h"
#ifndef Py_PYTHON_H
#error Python headers needed to compile C extensions, please install development version of Python.
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
#error Cython requires Python 2.6+ or Python 3.3+.
#else
#define CYTHON_ABI "0_29_21"
#define CYTHON_HEX_VERSION 0x001D15F0
#define CYTHON_ABI "0_29_32"
#define CYTHON_HEX_VERSION 0x001D20F0
#define CYTHON_FUTURE_DIVISION 1
#include <stddef.h>
#ifndef offsetof
@@ -47,6 +49,7 @@
#define CYTHON_COMPILING_IN_PYPY 1
#define CYTHON_COMPILING_IN_PYSTON 0
#define CYTHON_COMPILING_IN_CPYTHON 0
#define CYTHON_COMPILING_IN_NOGIL 0
#undef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 0
#undef CYTHON_USE_PYTYPE_LOOKUP
@@ -83,10 +86,14 @@
#define CYTHON_USE_DICT_VERSIONS 0
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
#define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_HEX >= 0x07030900)
#endif
#elif defined(PYSTON_VERSION)
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_PYSTON 1
#define CYTHON_COMPILING_IN_CPYTHON 0
#define CYTHON_COMPILING_IN_NOGIL 0
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
#endif
@@ -124,10 +131,59 @@
#define CYTHON_USE_DICT_VERSIONS 0
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
#define CYTHON_UPDATE_DESCRIPTOR_DOC 0
#endif
#elif defined(PY_NOGIL)
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_PYSTON 0
#define CYTHON_COMPILING_IN_CPYTHON 0
#define CYTHON_COMPILING_IN_NOGIL 1
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
#endif
#undef CYTHON_USE_PYTYPE_LOOKUP
#define CYTHON_USE_PYTYPE_LOOKUP 0
#ifndef CYTHON_USE_ASYNC_SLOTS
#define CYTHON_USE_ASYNC_SLOTS 1
#endif
#undef CYTHON_USE_PYLIST_INTERNALS
#define CYTHON_USE_PYLIST_INTERNALS 0
#ifndef CYTHON_USE_UNICODE_INTERNALS
#define CYTHON_USE_UNICODE_INTERNALS 1
#endif
#undef CYTHON_USE_UNICODE_WRITER
#define CYTHON_USE_UNICODE_WRITER 0
#undef CYTHON_USE_PYLONG_INTERNALS
#define CYTHON_USE_PYLONG_INTERNALS 0
#ifndef CYTHON_AVOID_BORROWED_REFS
#define CYTHON_AVOID_BORROWED_REFS 0
#endif
#ifndef CYTHON_ASSUME_SAFE_MACROS
#define CYTHON_ASSUME_SAFE_MACROS 1
#endif
#ifndef CYTHON_UNPACK_METHODS
#define CYTHON_UNPACK_METHODS 1
#endif
#undef CYTHON_FAST_THREAD_STATE
#define CYTHON_FAST_THREAD_STATE 0
#undef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL 0
#ifndef CYTHON_PEP489_MULTI_PHASE_INIT
#define CYTHON_PEP489_MULTI_PHASE_INIT 1
#endif
#ifndef CYTHON_USE_TP_FINALIZE
#define CYTHON_USE_TP_FINALIZE 1
#endif
#undef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS 0
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#else
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_PYSTON 0
#define CYTHON_COMPILING_IN_CPYTHON 1
#define CYTHON_COMPILING_IN_NOGIL 0
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
#endif
@@ -155,7 +211,7 @@
#ifndef CYTHON_USE_UNICODE_INTERNALS
#define CYTHON_USE_UNICODE_INTERNALS 1
#endif
#if PY_VERSION_HEX < 0x030300F0
#if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2
#undef CYTHON_USE_UNICODE_WRITER
#define CYTHON_USE_UNICODE_WRITER 0
#elif !defined(CYTHON_USE_UNICODE_WRITER)
@@ -170,11 +226,14 @@
#ifndef CYTHON_UNPACK_METHODS
#define CYTHON_UNPACK_METHODS 1
#endif
#ifndef CYTHON_FAST_THREAD_STATE
#if PY_VERSION_HEX >= 0x030B00A4
#undef CYTHON_FAST_THREAD_STATE
#define CYTHON_FAST_THREAD_STATE 0
#elif !defined(CYTHON_FAST_THREAD_STATE)
#define CYTHON_FAST_THREAD_STATE 1
#endif
#ifndef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL 1
#define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030A0000)
#endif
#ifndef CYTHON_PEP489_MULTI_PHASE_INIT
#define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000)
@@ -185,15 +244,23 @@
#ifndef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1)
#endif
#ifndef CYTHON_USE_EXC_INFO_STACK
#if PY_VERSION_HEX >= 0x030B00A4
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#elif !defined(CYTHON_USE_EXC_INFO_STACK)
#define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3)
#endif
#ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
#define CYTHON_UPDATE_DESCRIPTOR_DOC 1
#endif
#endif
#if !defined(CYTHON_FAST_PYCCALL)
#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1)
#endif
#if CYTHON_USE_PYLONG_INTERNALS
#include "longintrepr.h"
#if PY_MAJOR_VERSION < 3
#include "longintrepr.h"
#endif
#undef SHIFT
#undef BASE
#undef MASK
@@ -310,9 +377,68 @@
#define __Pyx_DefaultClassType PyClass_Type
#else
#define __Pyx_BUILTIN_MODULE_NAME "builtins"
#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
#define __Pyx_DefaultClassType PyType_Type
#if PY_VERSION_HEX >= 0x030B00A1
static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int k, int l, int s, int f,
PyObject *code, PyObject *c, PyObject* n, PyObject *v,
PyObject *fv, PyObject *cell, PyObject* fn,
PyObject *name, int fline, PyObject *lnos) {
PyObject *kwds=NULL, *argcount=NULL, *posonlyargcount=NULL, *kwonlyargcount=NULL;
PyObject *nlocals=NULL, *stacksize=NULL, *flags=NULL, *replace=NULL, *call_result=NULL, *empty=NULL;
const char *fn_cstr=NULL;
const char *name_cstr=NULL;
PyCodeObject* co=NULL;
PyObject *type, *value, *traceback;
PyErr_Fetch(&type, &value, &traceback);
if (!(kwds=PyDict_New())) goto end;
if (!(argcount=PyLong_FromLong(a))) goto end;
if (PyDict_SetItemString(kwds, "co_argcount", argcount) != 0) goto end;
if (!(posonlyargcount=PyLong_FromLong(0))) goto end;
if (PyDict_SetItemString(kwds, "co_posonlyargcount", posonlyargcount) != 0) goto end;
if (!(kwonlyargcount=PyLong_FromLong(k))) goto end;
if (PyDict_SetItemString(kwds, "co_kwonlyargcount", kwonlyargcount) != 0) goto end;
if (!(nlocals=PyLong_FromLong(l))) goto end;
if (PyDict_SetItemString(kwds, "co_nlocals", nlocals) != 0) goto end;
if (!(stacksize=PyLong_FromLong(s))) goto end;
if (PyDict_SetItemString(kwds, "co_stacksize", stacksize) != 0) goto end;
if (!(flags=PyLong_FromLong(f))) goto end;
if (PyDict_SetItemString(kwds, "co_flags", flags) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_code", code) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_consts", c) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_names", n) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_varnames", v) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_freevars", fv) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_cellvars", cell) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_linetable", lnos) != 0) goto end;
if (!(fn_cstr=PyUnicode_AsUTF8AndSize(fn, NULL))) goto end;
if (!(name_cstr=PyUnicode_AsUTF8AndSize(name, NULL))) goto end;
if (!(co = PyCode_NewEmpty(fn_cstr, name_cstr, fline))) goto end;
if (!(replace = PyObject_GetAttrString((PyObject*)co, "replace"))) goto cleanup_code_too;
if (!(empty = PyTuple_New(0))) goto cleanup_code_too; // unfortunately __pyx_empty_tuple isn't available here
if (!(call_result = PyObject_Call(replace, empty, kwds))) goto cleanup_code_too;
Py_XDECREF((PyObject*)co);
co = (PyCodeObject*)call_result;
call_result = NULL;
if (0) {
cleanup_code_too:
Py_XDECREF((PyObject*)co);
co = NULL;
}
end:
Py_XDECREF(kwds);
Py_XDECREF(argcount);
Py_XDECREF(posonlyargcount);
Py_XDECREF(kwonlyargcount);
Py_XDECREF(nlocals);
Py_XDECREF(stacksize);
Py_XDECREF(replace);
Py_XDECREF(call_result);
Py_XDECREF(empty);
if (type) {
PyErr_Restore(type, value, traceback);
}
return co;
}
#else
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
@@ -426,8 +552,12 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#endif
#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
#define CYTHON_PEP393_ENABLED 1
#if defined(PyUnicode_IS_READY)
#define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\
0 : _PyUnicode_Ready((PyObject *)(op)))
#else
#define __Pyx_PyUnicode_READY(op) (0)
#endif
#define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
#define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
#define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u)
@@ -436,7 +566,11 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
#define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
#if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE)
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length))
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
#endif
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u))
#endif
@@ -542,10 +676,10 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#if PY_VERSION_HEX < 0x030200A4
typedef long Py_hash_t;
#define __Pyx_PyInt_FromHash_t PyInt_FromLong
#define __Pyx_PyInt_AsHash_t PyInt_AsLong
#define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t
#else
#define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
#define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
#define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func))
@@ -570,8 +704,10 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
} __Pyx_PyAsyncMethodsStruct;
#endif

#if defined(WIN32) || defined(MS_WINDOWS)
#define _USE_MATH_DEFINES
#if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS)
#if !defined(_USE_MATH_DEFINES)
#define _USE_MATH_DEFINES
#endif
#endif
#include <math.h>
#ifdef NAN
@@ -705,6 +841,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x);
(likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj))
static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*);
#if CYTHON_ASSUME_SAFE_MACROS
#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
#else
@@ -932,13 +1069,21 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args,
#ifndef Py_MEMBER_SIZE
#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member)
#endif
#if CYTHON_FAST_PYCALL
static size_t __pyx_pyframe_localsplus_offset = 0;
#include "frameobject.h"
#if PY_VERSION_HEX >= 0x030b00a6
#ifndef Py_BUILD_CORE
#define Py_BUILD_CORE 1
#endif
#include "internal/pycore_frame.h"
#endif
#define __Pxy_PyFrame_Initialize_Offsets()\
((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\
(void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus)))
#define __Pyx_PyFrame_GetLocalsplus(frame)\
(assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset))
#endif // CYTHON_FAST_PYCALL
#endif

/* PyObjectCall.proto */
@@ -1055,6 +1200,11 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
static void __Pyx_AddTraceback(const char *funcname, int c_line,
int py_line, const char *filename);

/* GCCDiagnostics.proto */
#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
#define __Pyx_HAS_GCC_DIAGNOSTIC
#endif

/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);

@@ -1951,11 +2101,9 @@ if (!__Pyx_RefNanny) {
#endif
/*--- Library function declarations ---*/
/*--- Threads initialization code ---*/
#if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
#ifdef WITH_THREAD /* Python build with threading support? */
#if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
PyEval_InitThreads();
#endif
#endif
/*--- Module creation code ---*/
#if CYTHON_PEP489_MULTI_PHASE_INIT
__pyx_m = __pyx_pyinit_module;
@@ -2388,7 +2536,7 @@ done:
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) {
PyObject *result;
ternaryfunc call = func->ob_type->tp_call;
ternaryfunc call = Py_TYPE(func)->tp_call;
if (unlikely(!call))
return PyObject_Call(func, arg, kw);
if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
@@ -2446,7 +2594,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObjec
if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) {
return __Pyx_PyObject_CallMethO(func, arg);
#if CYTHON_FAST_PYCCALL
} else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) {
} else if (__Pyx_PyFastCFunction_Check(func)) {
return __Pyx_PyCFunction_FastCall(func, &arg, 1);
#endif
}
@@ -2607,7 +2755,7 @@ static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int
}
if (!use_cline) {
c_line = 0;
PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False);
(void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False);
}
else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) {
c_line = 0;
@@ -2701,33 +2849,40 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
#include "compile.h"
#include "frameobject.h"
#include "traceback.h"
#if PY_VERSION_HEX >= 0x030b00a6
#ifndef Py_BUILD_CORE
#define Py_BUILD_CORE 1
#endif
#include "internal/pycore_frame.h"
#endif
static PyCodeObject* __Pyx_CreateCodeObjectForTraceback(
const char *funcname, int c_line,
int py_line, const char *filename) {
PyCodeObject *py_code = 0;
PyObject *py_srcfile = 0;
PyObject *py_funcname = 0;
PyCodeObject *py_code = NULL;
PyObject *py_funcname = NULL;
#if PY_MAJOR_VERSION < 3
PyObject *py_srcfile = NULL;
py_srcfile = PyString_FromString(filename);
#else
py_srcfile = PyUnicode_FromString(filename);
#endif
if (!py_srcfile) goto bad;
#endif
if (c_line) {
#if PY_MAJOR_VERSION < 3
py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
if (!py_funcname) goto bad;
#else
py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
if (!py_funcname) goto bad;
funcname = PyUnicode_AsUTF8(py_funcname);
if (!funcname) goto bad;
#endif
}
else {
#if PY_MAJOR_VERSION < 3
py_funcname = PyString_FromString(funcname);
#else
py_funcname = PyUnicode_FromString(funcname);
if (!py_funcname) goto bad;
#endif
}
if (!py_funcname) goto bad;
#if PY_MAJOR_VERSION < 3
py_code = __Pyx_PyCode_New(
0,
0,
@@ -2746,11 +2901,16 @@ static PyCodeObject* __Pyx_CreateCodeObjectForTraceback(
__pyx_empty_bytes /*PyObject *lnotab*/
);
Py_DECREF(py_srcfile);
Py_DECREF(py_funcname);
#else
py_code = PyCode_NewEmpty(filename, funcname, py_line);
#endif
Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline
return py_code;
bad:
Py_XDECREF(py_srcfile);
Py_XDECREF(py_funcname);
#if PY_MAJOR_VERSION < 3
Py_XDECREF(py_srcfile);
#endif
return NULL;
}
static void __Pyx_AddTraceback(const char *funcname, int c_line,
@@ -2758,14 +2918,24 @@ static void __Pyx_AddTraceback(const char *funcname, int c_line,
PyCodeObject *py_code = 0;
PyFrameObject *py_frame = 0;
PyThreadState *tstate = __Pyx_PyThreadState_Current;
PyObject *ptype, *pvalue, *ptraceback;
if (c_line) {
c_line = __Pyx_CLineForTraceback(tstate, c_line);
}
py_code = __pyx_find_code_object(c_line ? -c_line : py_line);
if (!py_code) {
__Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback);
py_code = __Pyx_CreateCodeObjectForTraceback(
funcname, c_line, py_line, filename);
if (!py_code) goto bad;
if (!py_code) {
/* If the code object creation fails, then we should clear the
fetched exception references and propagate the new exception */
Py_XDECREF(ptype);
Py_XDECREF(pvalue);
Py_XDECREF(ptraceback);
goto bad;
}
__Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback);
__pyx_insert_code_object(c_line ? -c_line : py_line, py_code);
}
py_frame = PyFrame_New(
@@ -2784,7 +2954,14 @@ bad:

/* CIntToPy */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const long neg_one = (long) -1, const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
if (is_unsigned) {
if (sizeof(long) < sizeof(long)) {
@@ -2837,7 +3014,14 @@ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {

/* CIntFromPy */
static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) {
const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const long neg_one = (long) -1, const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_Check(x))) {
@@ -3026,7 +3210,14 @@ raise_neg_overflow:

/* CIntFromPy */
static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) {
const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const int neg_one = (int) -1, const_zero = (int) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_Check(x))) {
@@ -3315,11 +3506,33 @@ static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObj

/* CheckBinaryVersion */
static int __Pyx_check_binary_version(void) {
char ctversion[4], rtversion[4];
PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION);
PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion());
if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) {
char ctversion[5];
int same=1, i, found_dot;
const char* rt_from_call = Py_GetVersion();
PyOS_snprintf(ctversion, 5, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION);
found_dot = 0;
for (i = 0; i < 4; i++) {
if (!ctversion[i]) {
same = (rt_from_call[i] < '0' || rt_from_call[i] > '9');
break;
}
if (rt_from_call[i] != ctversion[i]) {
same = 0;
break;
}
}
if (!same) {
char rtversion[5] = {'\0'};
char message[200];
for (i=0; i<4; ++i) {
if (rt_from_call[i] == '.') {
if (found_dot) break;
found_dot = 1;
} else if (rt_from_call[i] < '0' || rt_from_call[i] > '9') {
break;
}
rtversion[i] = rt_from_call[i];
}
PyOS_snprintf(message, sizeof(message),
"compiletime version %s of module '%.100s' "
"does not match runtime version %s",
@@ -3577,6 +3790,23 @@ static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {
Py_DECREF(x);
return ival;
}
static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) {
if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) {
return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o);
#if PY_MAJOR_VERSION < 3
} else if (likely(PyInt_CheckExact(o))) {
return PyInt_AS_LONG(o);
#endif
} else {
Py_ssize_t ival;
PyObject *x;
x = PyNumber_Index(o);
if (!x) return -1;
ival = PyInt_AsLong(x);
Py_DECREF(x);
return ival;
}
}
static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) {
return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False);
}
@@ -135,6 +135,9 @@ else:
IterableBase = Iterable


ClearCookiePredicate = Callable[["Morsel[str]"], bool]


class AbstractCookieJar(Sized, IterableBase):
"""Abstract Cookie Jar."""

@@ -142,8 +145,12 @@ class AbstractCookieJar(Sized, IterableBase):
self._loop = get_running_loop(loop)

@abstractmethod
def clear(self) -> None:
"""Clear all cookies."""
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
"""Clear all cookies if no predicate is passed."""

@abstractmethod
def clear_domain(self, domain: str) -> None:
"""Clear all cookies for domain and all subdomains."""

@abstractmethod
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
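clear gains an optional predicate, typed by the new ClearCookiePredicate alias, so callers can delete a chosen subset instead of everything. Hypothetical usage against the concrete CookieJar (the domain is illustrative):

import asyncio
from aiohttp import CookieJar

async def main() -> None:
    jar = CookieJar()
    # Drop only cookies scoped to example.com; with no predicate,
    # clear() keeps its old clear-everything behaviour.
    jar.clear(lambda morsel: morsel["domain"] == "example.com")
    jar.clear()

asyncio.run(main())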
@@ -159,7 +166,7 @@ class AbstractStreamWriter(ABC):

buffer_size = 0
output_size = 0
length = 0 # type: Optional[int]
length: Optional[int] = 0

@abstractmethod
async def write(self, chunk: bytes) -> None:

@@ -15,13 +15,17 @@ class BaseProtocol(asyncio.Protocol):
)

def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop # type: asyncio.AbstractEventLoop
self._loop: asyncio.AbstractEventLoop = loop
self._paused = False
self._drain_waiter = None # type: Optional[asyncio.Future[None]]
self._connection_lost = False
self._drain_waiter: Optional[asyncio.Future[None]] = None
self._reading_paused = False

self.transport = None # type: Optional[asyncio.Transport]
self.transport: Optional[asyncio.Transport] = None

@property
def connected(self) -> bool:
"""Return True if the connection is open."""
return self.transport is not None

def pause_writing(self) -> None:
assert not self._paused
@@ -59,7 +63,6 @@ class BaseProtocol(asyncio.Protocol):
self.transport = tr

def connection_lost(self, exc: Optional[BaseException]) -> None:
self._connection_lost = True
# Wake up the writer if currently paused.
self.transport = None
if not self._paused:
@@ -76,12 +79,12 @@ class BaseProtocol(asyncio.Protocol):
waiter.set_exception(exc)

async def _drain_helper(self) -> None:
if self._connection_lost:
if not self.connected:
raise ConnectionResetError("Connection lost")
if not self._paused:
return
waiter = self._drain_waiter
assert waiter is None or waiter.cancelled()
waiter = self._loop.create_future()
self._drain_waiter = waiter
await waiter
if waiter is None:
waiter = self._loop.create_future()
self._drain_waiter = waiter
await asyncio.shield(waiter)
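_drain_helper above now reuses a still-pending waiter and awaits it through asyncio.shield, so cancelling one writer no longer cancels the future that other concurrent writers share. The shield semantics in isolation, as a runnable sketch:

import asyncio

async def main() -> None:
    waiter = asyncio.get_running_loop().create_future()
    task = asyncio.create_task(asyncio.shield(waiter))
    await asyncio.sleep(0)  # let the task start awaiting
    task.cancel()           # cancels the task, not the shielded future
    try:
        await task
    except asyncio.CancelledError:
        pass
    print(waiter.cancelled())  # False: the shared waiter survived

asyncio.run(main())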
@@ -8,6 +8,7 @@ import os
import sys
import traceback
import warnings
from contextlib import suppress
from types import SimpleNamespace, TracebackType
from typing import (
Any,
@@ -74,10 +75,10 @@ from .helpers import (
DEBUG,
PY_36,
BasicAuth,
CeilTimeout,
TimeoutHandle,
ceil_timeout,
get_env_proxy_for_url,
get_running_loop,
proxies_from_env,
sentinel,
strip_auth_from_url,
)
@@ -85,7 +86,7 @@ from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse
from .streams import FlowControlDataQueue
from .tracing import Trace, TraceConfig
from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL
from .typedefs import Final, JSONEncoder, LooseCookies, LooseHeaders, StrOrURL

__all__ = (
# client_exceptions
@@ -130,7 +131,7 @@ __all__ = (
try:
from ssl import SSLContext
except ImportError: # pragma: no cover
SSLContext = object # type: ignore
SSLContext = object # type: ignore[misc,assignment]


@attr.s(auto_attribs=True, frozen=True, slots=True)
@@ -155,7 +156,7 @@ class ClientTimeout:


# 5 Minute default read timeout
DEFAULT_TIMEOUT = ClientTimeout(total=5 * 60)
DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60)

_RetType = TypeVar("_RetType")


@@ -165,6 +166,7 @@ class ClientSession:

ATTRS = frozenset(
[
"_base_url",
"_source_traceback",
"_connector",
"requote_redirect_url",
@@ -189,10 +191,12 @@ class ClientSession:
]
)

_source_traceback = None
_source_traceback = None # type: Optional[traceback.StackSummary]
_connector = None # type: Optional[BaseConnector]

def __init__(
self,
base_url: Optional[StrOrURL] = None,
*,
connector: Optional[BaseConnector] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
@@ -215,15 +219,22 @@ class ClientSession:
trust_env: bool = False,
requote_redirect_url: bool = True,
trace_configs: Optional[List[TraceConfig]] = None,
read_bufsize: int = 2 ** 16,
read_bufsize: int = 2**16,
) -> None:

if loop is None:
if connector is not None:
loop = connector._loop

loop = get_running_loop(loop)

if base_url is None or isinstance(base_url, URL):
self._base_url: Optional[URL] = base_url
else:
self._base_url = URL(base_url)
assert (
self._base_url.origin() == self._base_url
), "Only absolute URLs without path part are supported"

if connector is None:
connector = TCPConnector(loop=loop)

@@ -242,7 +253,7 @@ class ClientSession:
if cookies is not None:
self._cookie_jar.update_cookies(cookies)

self._connector = connector # type: Optional[BaseConnector]
self._connector = connector
self._connector_owner = connector_owner
self._default_auth = auth
self._version = version
@@ -264,7 +275,7 @@ class ClientSession:
stacklevel=2,
)
else:
self._timeout = timeout # type: ignore
self._timeout = timeout # type: ignore[assignment]
if read_timeout is not sentinel:
raise ValueError(
"read_timeout and timeout parameters "
@@ -285,12 +296,12 @@ class ClientSession:

# Convert to list of tuples
if headers:
real_headers = CIMultiDict(headers) # type: CIMultiDict[str]
real_headers: CIMultiDict[str] = CIMultiDict(headers)
else:
real_headers = CIMultiDict()
self._default_headers = real_headers # type: CIMultiDict[str]
self._default_headers: CIMultiDict[str] = real_headers
if skip_auto_headers is not None:
self._skip_auto_headers = frozenset([istr(i) for i in skip_auto_headers])
self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
else:
self._skip_auto_headers = frozenset()

@@ -342,6 +353,14 @@ class ClientSession:
"""Perform HTTP request."""
return _RequestContextManager(self._request(method, url, **kwargs))

def _build_url(self, str_or_url: StrOrURL) -> URL:
url = URL(str_or_url)
if self._base_url is None:
return url
else:
assert not url.is_absolute() and url.path.startswith("/")
return self._base_url.join(url)
|
||||
async def _request(
|
||||
self,
|
||||
method: str,
|
||||
|
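For illustration (not part of the patch): the `_build_url` helper added above lets a session carry a base URL and join relative request paths onto it. A minimal sketch, assuming a reachable host `example.com` and path `/api/status`:

    import asyncio
    import aiohttp

    async def main() -> None:
        # Only an absolute URL without a path part is accepted as base_url;
        # request paths must then be relative and start with "/".
        async with aiohttp.ClientSession(base_url="http://example.com") as session:
            async with session.get("/api/status") as resp:  # joined onto base_url
                print(resp.status)

    asyncio.run(main())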
@@ -401,7 +420,7 @@ class ClientSession:
             proxy_headers = self._prepare_headers(proxy_headers)

         try:
-            url = URL(str_or_url)
+            url = self._build_url(str_or_url)
         except ValueError as e:
             raise InvalidURL(str_or_url) from e

@@ -417,10 +436,10 @@ class ClientSession:
                 raise InvalidURL(proxy) from e

         if timeout is sentinel:
-            real_timeout = self._timeout  # type: ClientTimeout
+            real_timeout: ClientTimeout = self._timeout
         else:
             if not isinstance(timeout, ClientTimeout):
-                real_timeout = ClientTimeout(total=timeout)  # type: ignore
+                real_timeout = ClientTimeout(total=timeout)  # type: ignore[arg-type]
             else:
                 real_timeout = timeout
         # timeout is cumulative for all request operations
@@ -441,7 +460,7 @@ class ClientSession:
         ]

         for trace in traces:
-            await trace.send_request_start(method, url, headers)
+            await trace.send_request_start(method, url.update_query(params), headers)

         timer = tm.timer()
         try:
@@ -483,11 +502,8 @@ class ClientSession:
                 if proxy is not None:
                     proxy = URL(proxy)
                 elif self._trust_env:
-                    for scheme, proxy_info in proxies_from_env().items():
-                        if scheme == url.scheme:
-                            proxy = proxy_info.proxy
-                            proxy_auth = proxy_info.proxy_auth
-                            break
+                    with suppress(LookupError):
+                        proxy, proxy_auth = get_env_proxy_for_url(url)

                 req = self._request_class(
                     method,
@@ -515,7 +531,7 @@ class ClientSession:

                 # connection timeout
                 try:
-                    with CeilTimeout(real_timeout.connect, loop=self._loop):
+                    async with ceil_timeout(real_timeout.connect):
                         assert self._connector is not None
                         conn = await self._connector.connect(
                             req, traces=traces, timeout=real_timeout
@@ -551,6 +567,8 @@ class ClientSession:
                 except ClientError:
                     raise
                 except OSError as exc:
+                    if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+                        raise
                     raise ClientOSError(*exc.args) from exc

                 self._cookie_jar.update_cookies(resp.cookies, resp.url)
@@ -560,7 +578,7 @@ class ClientSession:

                     for trace in traces:
                         await trace.send_request_redirect(
-                            method, url, headers, resp
+                            method, url.update_query(params), headers, resp
                         )

                     redirects += 1
@@ -634,7 +652,9 @@ class ClientSession:
             resp._history = tuple(history)

             for trace in traces:
-                await trace.send_request_end(method, url, headers, resp)
+                await trace.send_request_end(
+                    method, url.update_query(params), headers, resp
+                )
             return resp

         except BaseException as e:
@@ -645,7 +665,9 @@ class ClientSession:
                 handle = None

             for trace in traces:
-                await trace.send_request_exception(method, url, headers, e)
+                await trace.send_request_exception(
+                    method, url.update_query(params), headers, e
+                )
             raise

     def ws_connect(
@@ -661,6 +683,7 @@ class ClientSession:
         heartbeat: Optional[float] = None,
         auth: Optional[BasicAuth] = None,
         origin: Optional[str] = None,
+        params: Optional[Mapping[str, str]] = None,
         headers: Optional[LooseHeaders] = None,
         proxy: Optional[StrOrURL] = None,
         proxy_auth: Optional[BasicAuth] = None,
@@ -685,6 +708,7 @@ class ClientSession:
             heartbeat=heartbeat,
             auth=auth,
             origin=origin,
+            params=params,
             headers=headers,
             proxy=proxy,
             proxy_auth=proxy_auth,
@@ -711,6 +735,7 @@ class ClientSession:
         heartbeat: Optional[float] = None,
         auth: Optional[BasicAuth] = None,
         origin: Optional[str] = None,
+        params: Optional[Mapping[str, str]] = None,
         headers: Optional[LooseHeaders] = None,
         proxy: Optional[StrOrURL] = None,
         proxy_auth: Optional[BasicAuth] = None,
@@ -724,7 +749,7 @@ class ClientSession:
     ) -> ClientWebSocketResponse:

         if headers is None:
-            real_headers = CIMultiDict()  # type: CIMultiDict[str]
+            real_headers: CIMultiDict[str] = CIMultiDict()
         else:
             real_headers = CIMultiDict(headers)

@@ -754,6 +779,7 @@ class ClientSession:
         resp = await self.request(
             method,
             url,
+            params=params,
             headers=real_headers,
             read_until_eof=False,
             auth=auth,
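For illustration (not part of the patch): the new `params` argument is forwarded to the underlying handshake request, so query parameters can be attached to the WebSocket URL. A sketch, assuming a hypothetical echo endpoint at `ws://example.com/ws`:

    import asyncio
    import aiohttp

    async def main() -> None:
        async with aiohttp.ClientSession() as session:
            # params is applied to the handshake GET, i.e. ws://.../ws?token=abc
            async with session.ws_connect(
                "ws://example.com/ws", params={"token": "abc"}
            ) as ws:
                await ws.send_str("hello")
                msg = await ws.receive()
                print(msg.type)

    asyncio.run(main())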
@@ -842,9 +868,9 @@ class ClientSession:
             assert conn_proto is not None
             transport = conn.transport
             assert transport is not None
-            reader = FlowControlDataQueue(
-                conn_proto, 2 ** 16, loop=self._loop
-            )  # type: FlowControlDataQueue[WSMessage]
+            reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue(
+                conn_proto, 2**16, loop=self._loop
+            )
             conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
             writer = WebSocketWriter(
                 conn_proto,
@@ -879,7 +905,7 @@ class ClientSession:
         if headers:
             if not isinstance(headers, (MultiDictProxy, MultiDict)):
                 headers = CIMultiDict(headers)
-            added_names = set()  # type: Set[str]
+            added_names: Set[str] = set()
             for key, value in headers.items():
                 if key in added_names:
                     result.add(key, value)
@@ -1001,7 +1027,7 @@ class ClientSession:
         return self._loop

     @property
-    def timeout(self) -> Union[object, ClientTimeout]:
+    def timeout(self) -> ClientTimeout:
         """Timeout for the session."""
         return self._timeout

@@ -1034,23 +1060,21 @@ class ClientSession:
     def raise_for_status(
         self,
     ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
-        """
-        Should `ClientResponse.raise_for_status()`
-        be called for each response
-        """
+        """Should `ClientResponse.raise_for_status()` be called for each response."""
         return self._raise_for_status

     @property
     def auto_decompress(self) -> bool:
-        """Should the body response be automatically decompressed"""
+        """Should the body response be automatically decompressed."""
         return self._auto_decompress

     @property
     def trust_env(self) -> bool:
         """
-        Should get proxies information
-        from HTTP_PROXY / HTTPS_PROXY environment variables
-        or ~/.netrc file if present
+        Should proxies information from environment or netrc be trusted.
+
+        Information is from HTTP_PROXY / HTTPS_PROXY environment variables
+        or ~/.netrc file if present.
         """
         return self._trust_env

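For illustration (not part of the patch): with `trust_env=True`, proxy settings are read from the HTTP_PROXY / HTTPS_PROXY environment variables (or ~/.netrc) via the `get_env_proxy_for_url` helper used above. A minimal sketch, assuming a reachable host `example.com`:

    import asyncio
    import aiohttp

    async def main() -> None:
        # With trust_env=True the environment decides the proxy;
        # no explicit proxy= argument is needed on each request.
        async with aiohttp.ClientSession(trust_env=True) as session:
            async with session.get("http://example.com") as resp:
                print(resp.status)

    asyncio.run(main())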
@@ -1100,7 +1124,7 @@ class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType
     def send(self, arg: None) -> "asyncio.Future[Any]":
         return self._coro.send(arg)

-    def throw(self, arg: BaseException) -> None:  # type: ignore
+    def throw(self, arg: BaseException) -> None:  # type: ignore[arg-type,override]
         self._coro.throw(arg)

     def close(self) -> None:
@@ -1119,6 +1143,8 @@ class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType


 class _RequestContextManager(_BaseRequestContextManager[ClientResponse]):
+    __slots__ = ()
+
     async def __aexit__(
         self,
         exc_type: Optional[Type[BaseException]],
@@ -1134,6 +1160,8 @@ class _RequestContextManager(_BaseRequestContextManager[ClientResponse]):


 class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]):
+    __slots__ = ()
+
     async def __aexit__(
         self,
         exc_type: Optional[Type[BaseException]],
@@ -1153,7 +1181,7 @@ class _SessionRequestContextManager:
         session: ClientSession,
     ) -> None:
         self._coro = coro
-        self._resp = None  # type: Optional[ClientResponse]
+        self._resp: Optional[ClientResponse] = None
         self._session = session

     async def __aenter__(self) -> ClientResponse:
@@ -1202,7 +1230,9 @@ def request(
     read_bufsize: Optional[int] = None,
     loop: Optional[asyncio.AbstractEventLoop] = None,
 ) -> _SessionRequestContextManager:
-    """Constructs and sends a request. Returns response object.
+    """Constructs and sends a request.
+
+    Returns response object.
     method - HTTP method
     url - request url
     params - (optional) Dictionary or bytes to be sent in the query
@@ -4,6 +4,7 @@ import asyncio
 import warnings
 from typing import TYPE_CHECKING, Any, Optional, Tuple, Union

+from .http_parser import RawResponseMessage
 from .typedefs import LooseHeaders

 try:
@@ -11,7 +12,7 @@ try:

     SSLContext = ssl.SSLContext
 except ImportError:  # pragma: no cover
-    ssl = SSLContext = None  # type: ignore
+    ssl = SSLContext = None  # type: ignore[assignment]


 if TYPE_CHECKING:  # pragma: no cover
@@ -99,7 +100,7 @@ class ClientResponseError(ClientError):
             args += f", message={self.message!r}"
         if self.headers is not None:
             args += f", headers={self.headers!r}"
-        return "{}({})".format(type(self).__name__, args)
+        return f"{type(self).__name__}({args})"

     @property
     def code(self) -> int:
@@ -153,7 +154,7 @@ class ClientConnectorError(ClientOSError):
     """Client connector error.

     Raised in :class:`aiohttp.connector.TCPConnector` if
-        connection to proxy can not be established.
+        a connection can not be established.
     """

     def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
@@ -195,6 +196,29 @@ class ClientProxyConnectionError(ClientConnectorError):
     """


+class UnixClientConnectorError(ClientConnectorError):
+    """Unix connector error.
+
+    Raised in :py:class:`aiohttp.connector.UnixConnector`
+    if connection to unix socket can not be established.
+    """
+
+    def __init__(
+        self, path: str, connection_key: ConnectionKey, os_error: OSError
+    ) -> None:
+        self._path = path
+        super().__init__(connection_key, os_error)
+
+    @property
+    def path(self) -> str:
+        return self._path
+
+    def __str__(self) -> str:
+        return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
+            self, self.ssl if self.ssl is not None else "default", self.strerror
+        )
+
+
 class ServerConnectionError(ClientConnectionError):
     """Server connection errors."""

@@ -202,7 +226,7 @@ class ServerConnectionError(ClientConnectionError):
 class ServerDisconnectedError(ServerConnectionError):
     """Server disconnected."""

-    def __init__(self, message: Optional[str] = None) -> None:
+    def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
         if message is None:
             message = "Server disconnected"

@@ -238,7 +262,8 @@ class InvalidURL(ClientError, ValueError):
     """Invalid URL.

     URL used for fetching is malformed, e.g. it doesn't contains host
-    part."""
+    part.
+    """

     # Derive from ValueError for backward compatibility

@@ -279,11 +304,11 @@ else:  # pragma: no cover
     ssl_error_bases = (ClientSSLError,)


-class ClientConnectorSSLError(*ssl_error_bases):  # type: ignore
+class ClientConnectorSSLError(*ssl_error_bases):  # type: ignore[misc]
     """Response ssl error."""


-class ClientConnectorCertificateError(*cert_errors_bases):  # type: ignore
+class ClientConnectorCertificateError(*cert_errors_bases):  # type: ignore[misc]
     """Response certificate error."""

     def __init__(
@@ -23,7 +23,7 @@ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamRe

         self._should_close = False

-        self._payload = None
+        self._payload: Optional[StreamReader] = None
         self._skip_payload = False
         self._payload_parser = None

@@ -31,10 +31,10 @@ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamRe

         self._tail = b""
         self._upgraded = False
-        self._parser = None  # type: Optional[HttpResponseParser]
+        self._parser: Optional[HttpResponseParser] = None

-        self._read_timeout = None  # type: Optional[float]
-        self._read_timeout_handle = None  # type: Optional[asyncio.TimerHandle]
+        self._read_timeout: Optional[float] = None
+        self._read_timeout_handle: Optional[asyncio.TimerHandle] = None

     @property
     def upgraded(self) -> bool:
@@ -142,7 +142,7 @@ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamRe
         read_until_eof: bool = False,
         auto_decompress: bool = True,
         read_timeout: Optional[float] = None,
-        read_bufsize: int = 2 ** 16
+        read_bufsize: int = 2**16,
     ) -> None:
         self._skip_payload = skip_payload

@@ -223,7 +223,7 @@ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamRe

         self._upgraded = upgraded

-        payload = None
+        payload: Optional[StreamReader] = None
         for message, payload in messages:
             if message.should_close:
                 self._should_close = True
@@ -231,7 +231,7 @@ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamRe
             self._payload = payload

             if self._skip_payload or message.code in (204, 304):
-                self.feed_data((message, EMPTY_PAYLOAD), 0)  # type: ignore
+                self.feed_data((message, EMPTY_PAYLOAD), 0)
             else:
                 self.feed_data((message, payload), 0)
             if payload is not None:
@@ -63,13 +63,13 @@ try:
     import ssl
     from ssl import SSLContext
 except ImportError:  # pragma: no cover
-    ssl = None  # type: ignore
-    SSLContext = object  # type: ignore
+    ssl = None  # type: ignore[assignment]
+    SSLContext = object  # type: ignore[misc,assignment]

 try:
     import cchardet as chardet
 except ImportError:  # pragma: no cover
-    import chardet  # type: ignore
+    import charset_normalizer as chardet  # type: ignore[no-redef]


 __all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
@@ -294,7 +294,7 @@ class ClientRequest:
             real_response_class = ClientResponse
         else:
             real_response_class = response_class
-        self.response_class = real_response_class  # type: Type[ClientResponse]
+        self.response_class: Type[ClientResponse] = real_response_class
         self._timer = timer if timer is not None else TimerNoop()
         self._ssl = ssl

@@ -311,7 +311,7 @@ class ClientRequest:
         self.update_proxy(proxy, proxy_auth, proxy_headers)

         self.update_body_from_data(data)
-        if data or self.method not in self.GET_METHODS:
+        if data is not None or self.method not in self.GET_METHODS:
             self.update_transfer_encoding()
         self.update_expect_continue(expect100)
         if traces is None:
@@ -329,9 +329,7 @@ class ClientRequest:
     def connection_key(self) -> ConnectionKey:
         proxy_headers = self.proxy_headers
         if proxy_headers:
-            h = hash(
-                tuple((k, v) for k, v in proxy_headers.items())
-            )  # type: Optional[int]
+            h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items()))
         else:
             h = None
         return ConnectionKey(
@@ -356,7 +354,7 @@ class ClientRequest:

     @property
     def request_info(self) -> RequestInfo:
-        headers = CIMultiDictProxy(self.headers)  # type: CIMultiDictProxy[str]
+        headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
         return RequestInfo(self.url, self.method, headers, self.original_url)

     def update_host(self, url: URL) -> None:
@@ -387,7 +385,7 @@ class ClientRequest:

     def update_headers(self, headers: Optional[LooseHeaders]) -> None:
         """Update request headers."""
-        self.headers = CIMultiDict()  # type: CIMultiDict[str]
+        self.headers: CIMultiDict[str] = CIMultiDict()

         # add host
         netloc = cast(str, self.url.raw_host)
@@ -399,9 +397,9 @@ class ClientRequest:

         if headers:
             if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
-                headers = headers.items()  # type: ignore
+                headers = headers.items()  # type: ignore[assignment]

-            for key, value in headers:  # type: ignore
+            for key, value in headers:  # type: ignore[misc]
                 # A special case for Host header
                 if key.lower() == "host":
                     self.headers[key] = value
@@ -413,7 +411,7 @@ class ClientRequest:
             (hdr, None) for hdr in sorted(skip_auto_headers)
         )
         used_headers = self.headers.copy()
-        used_headers.extend(self.skip_auto_headers)  # type: ignore
+        used_headers.extend(self.skip_auto_headers)  # type: ignore[arg-type]

         for hdr, val in self.DEFAULT_HEADERS.items():
             if hdr not in used_headers:
@@ -427,7 +425,7 @@ class ClientRequest:
         if not cookies:
             return

-        c = SimpleCookie()  # type: SimpleCookie[str]
+        c: SimpleCookie[str] = SimpleCookie()
         if hdrs.COOKIE in self.headers:
             c.load(self.headers.get(hdrs.COOKIE, ""))
             del self.headers[hdrs.COOKIE]
@@ -435,7 +433,7 @@ class ClientRequest:
         if isinstance(cookies, Mapping):
             iter_cookies = cookies.items()
         else:
-            iter_cookies = cookies  # type: ignore
+            iter_cookies = cookies  # type: ignore[assignment]
         for name, value in iter_cookies:
             if isinstance(value, Morsel):
                 # Preserve coded_value
@@ -443,13 +441,13 @@ class ClientRequest:
                 mrsl_val.set(value.key, value.value, value.coded_value)
                 c[name] = mrsl_val
             else:
-                c[name] = value  # type: ignore
+                c[name] = value  # type: ignore[assignment]

         self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()

     def update_content_encoding(self, data: Any) -> None:
         """Set request content encoding."""
-        if not data:
+        if data is None:
             return

         enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower()
@@ -499,7 +497,7 @@ class ClientRequest:
         self.headers[hdrs.AUTHORIZATION] = auth.encode()

     def update_body_from_data(self, body: Any) -> None:
-        if not body:
+        if body is None:
             return

         # FormData
@@ -547,8 +545,6 @@ class ClientRequest:
         proxy_auth: Optional[BasicAuth],
         proxy_headers: Optional[LooseHeaders],
     ) -> None:
-        if proxy and not proxy.scheme == "http":
-            raise ValueError("Only http proxies are supported")
         if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
             raise ValueError("proxy_auth must be None or BasicAuth() tuple")
         self.proxy = proxy
@@ -585,19 +581,22 @@ class ClientRequest:
                 await self.body.write(writer)
             else:
                 if isinstance(self.body, (bytes, bytearray)):
-                    self.body = (self.body,)  # type: ignore
+                    self.body = (self.body,)  # type: ignore[assignment]

                 for chunk in self.body:
-                    await writer.write(chunk)  # type: ignore
+                    await writer.write(chunk)  # type: ignore[arg-type]

             await writer.write_eof()
         except OSError as exc:
-            new_exc = ClientOSError(
-                exc.errno, "Can not write request body for %s" % self.url
-            )
-            new_exc.__context__ = exc
-            new_exc.__cause__ = exc
-            protocol.set_exception(new_exc)
+            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+                protocol.set_exception(exc)
+            else:
+                new_exc = ClientOSError(
+                    exc.errno, "Can not write request body for %s" % self.url
+                )
+                new_exc.__context__ = exc
+                new_exc.__cause__ = exc
+                protocol.set_exception(new_exc)
         except asyncio.CancelledError as exc:
             if not conn.closed:
                 protocol.set_exception(exc)
@@ -632,6 +631,9 @@ class ClientRequest:
             on_chunk_sent=functools.partial(
                 self._on_chunk_request_sent, self.method, self.url
             ),
+            on_headers_sent=functools.partial(
+                self._on_headers_request_sent, self.method, self.url
+            ),
         )

         if self.compress:
@@ -701,17 +703,23 @@ class ClientRequest:
         for trace in self._traces:
             await trace.send_request_chunk_sent(method, url, chunk)

+    async def _on_headers_request_sent(
+        self, method: str, url: URL, headers: "CIMultiDict[str]"
+    ) -> None:
+        for trace in self._traces:
+            await trace.send_request_headers(method, url, headers)
+

 class ClientResponse(HeadersMixin):

     # from the Status-Line of the response
     version = None  # HTTP-Version
-    status = None  # type: int  # Status-Code
+    status: int = None  # type: ignore[assignment]  # Status-Code
     reason = None  # Reason-Phrase

-    content = None  # type: StreamReader  # Payload stream
-    _headers = None  # type: CIMultiDictProxy[str]  # Response headers
-    _raw_headers = None  # type: RawHeaders  # Response raw headers
+    content: StreamReader = None  # type: ignore[assignment]  # Payload stream
+    _headers: "CIMultiDictProxy[str]" = None  # type: ignore[assignment]
+    _raw_headers: RawHeaders = None  # type: ignore[assignment]  # Response raw headers

     _connection = None  # current connection
     _source_traceback = None
@@ -736,22 +744,22 @@ class ClientResponse(HeadersMixin):
         assert isinstance(url, URL)

         self.method = method
-        self.cookies = SimpleCookie()  # type: SimpleCookie[str]
+        self.cookies: SimpleCookie[str] = SimpleCookie()

         self._real_url = url
         self._url = url.with_fragment(None)
-        self._body = None  # type: Any
-        self._writer = writer  # type: Optional[asyncio.Task[None]]
+        self._body: Any = None
+        self._writer: Optional[asyncio.Task[None]] = writer
         self._continue = continue100  # None by default
         self._closed = True
-        self._history = ()  # type: Tuple[ClientResponse, ...]
+        self._history: Tuple[ClientResponse, ...] = ()
         self._request_info = request_info
         self._timer = timer if timer is not None else TimerNoop()
-        self._cache = {}  # type: Dict[str, Any]
+        self._cache: Dict[str, Any] = {}
         self._traces = traces
         self._loop = loop
         # store a reference to session #1985
-        self._session = session  # type: Optional[ClientSession]
+        self._session: Optional[ClientSession] = session
         if loop.get_debug():
             self._source_traceback = traceback.extract_stack(sys._getframe(1))

@@ -848,7 +856,7 @@ class ClientResponse(HeadersMixin):
         if not links_str:
             return MultiDictProxy(MultiDict())

-        links = MultiDict()  # type: MultiDict[MultiDictProxy[Union[str, URL]]]
+        links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()

         for val in re.split(r",(?=\s*<)", links_str):
             match = re.match(r"\s*<(.*)>(.*)", val)
@@ -858,7 +866,7 @@ class ClientResponse(HeadersMixin):
             url, params_str = match.groups()
             params = params_str.split(";")[1:]

-            link = MultiDict()  # type: MultiDict[Union[str, URL]]
+            link: MultiDict[Union[str, URL]] = MultiDict()

             for param in params:
                 match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
@@ -869,7 +877,7 @@ class ClientResponse(HeadersMixin):

                 link.add(key, value)

-            key = link.get("rel", url)  # type: ignore
+            key = link.get("rel", url)  # type: ignore[assignment]

             link.add("url", self.url.join(URL(url)))

@@ -887,7 +895,8 @@ class ClientResponse(HeadersMixin):
         while True:
             # read response
             try:
-                message, payload = await self._protocol.read()  # type: ignore
+                protocol = self._protocol
+                message, payload = await protocol.read()  # type: ignore[union-attr]
             except http.HttpProcessingError as exc:
                 raise ClientResponseError(
                     self.request_info,
@@ -986,14 +995,10 @@ class ClientResponse(HeadersMixin):
         This is **not** a check for ``200 OK`` but a check that the response
         status is under 400.
         """
-        try:
-            self.raise_for_status()
-        except ClientResponseError:
-            return False
-        return True
+        return 400 > self.status

     def raise_for_status(self) -> None:
-        if 400 <= self.status:
+        if not self.ok:
             # reason should always be not None for a started response
             assert self.reason is not None
             self.release()
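For illustration (not part of the patch): after this change `ok` is a pure status check (`status < 400`) with no exception machinery, and `raise_for_status()` is expressed in terms of it. A sketch, assuming a host `example.com`:

    import asyncio
    import aiohttp

    async def main() -> None:
        async with aiohttp.ClientSession() as session:
            async with session.get("http://example.com/missing") as resp:
                if not resp.ok:  # True exactly when status < 400
                    print("request failed with", resp.status)

    asyncio.run(main())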
@@ -1040,7 +1045,7 @@ class ClientResponse(HeadersMixin):
         elif self._released:
             raise ClientConnectionError("Connection closed")

-        return self._body
+        return self._body  # type: ignore[no-any-return]

     def get_encoding(self) -> str:
         ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
@@ -1078,7 +1083,9 @@ class ClientResponse(HeadersMixin):
         if encoding is None:
             encoding = self.get_encoding()

-        return self._body.decode(encoding, errors=errors)  # type: ignore
+        return self._body.decode(  # type: ignore[no-any-return,union-attr]
+            encoding, errors=errors
+        )

     async def json(
         self,
@@ -1103,7 +1110,7 @@ class ClientResponse(HeadersMixin):
                 headers=self.headers,
             )

-        stripped = self._body.strip()  # type: ignore
+        stripped = self._body.strip()  # type: ignore[union-attr]
         if not stripped:
             return None

@@ -1,7 +1,7 @@
 """WebSocket client for asyncio."""

 import asyncio
-from typing import Any, Optional
+from typing import Any, Optional, cast

 import async_timeout

@@ -12,6 +12,7 @@ from .http import (
     WS_CLOSED_MESSAGE,
     WS_CLOSING_MESSAGE,
     WebSocketError,
+    WSCloseCode,
     WSMessage,
     WSMsgType,
 )
@@ -50,19 +51,19 @@ class ClientWebSocketResponse:
         self._protocol = protocol
         self._closed = False
         self._closing = False
-        self._close_code = None  # type: Optional[int]
+        self._close_code: Optional[int] = None
         self._timeout = timeout
         self._receive_timeout = receive_timeout
         self._autoclose = autoclose
         self._autoping = autoping
         self._heartbeat = heartbeat
-        self._heartbeat_cb = None
+        self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
         if heartbeat is not None:
             self._pong_heartbeat = heartbeat / 2.0
-        self._pong_response_cb = None
+        self._pong_response_cb: Optional[asyncio.TimerHandle] = None
         self._loop = loop
-        self._waiting = None  # type: Optional[asyncio.Future[bool]]
-        self._exception = None  # type: Optional[BaseException]
+        self._waiting: Optional[asyncio.Future[bool]] = None
+        self._exception: Optional[BaseException] = None
         self._compress = compress
         self._client_notakeover = client_notakeover

@@ -101,7 +102,7 @@ class ClientWebSocketResponse:
     def _pong_not_received(self) -> None:
         if not self._closed:
             self._closed = True
-            self._close_code = 1006
+            self._close_code = WSCloseCode.ABNORMAL_CLOSURE
             self._exception = asyncio.TimeoutError()
             self._response.close()

@@ -163,7 +164,7 @@ class ClientWebSocketResponse:
     ) -> None:
         await self.send_str(dumps(data), compress=compress)

-    async def close(self, *, code: int = 1000, message: bytes = b"") -> bool:
+    async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
         # we need to break `receive()` cycle first,
         # `close()` may be called from different task
         if self._waiting is not None and not self._closed:
@@ -176,11 +177,11 @@ class ClientWebSocketResponse:
             try:
                 await self._writer.close(code, message)
             except asyncio.CancelledError:
-                self._close_code = 1006
+                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                 self._response.close()
                 raise
             except Exception as exc:
-                self._close_code = 1006
+                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                 self._exception = exc
                 self._response.close()
                 return True
@@ -191,14 +192,14 @@ class ClientWebSocketResponse:

             while True:
                 try:
-                    with async_timeout.timeout(self._timeout, loop=self._loop):
+                    async with async_timeout.timeout(self._timeout):
                         msg = await self._reader.read()
                 except asyncio.CancelledError:
-                    self._close_code = 1006
+                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                     self._response.close()
                     raise
                 except Exception as exc:
-                    self._close_code = 1006
+                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                     self._exception = exc
                     self._response.close()
                     return True
@@ -224,9 +225,7 @@ class ClientWebSocketResponse:
             try:
                 self._waiting = self._loop.create_future()
                 try:
-                    with async_timeout.timeout(
-                        timeout or self._receive_timeout, loop=self._loop
-                    ):
+                    async with async_timeout.timeout(timeout or self._receive_timeout):
                         msg = await self._reader.read()
                         self._reset_heartbeat()
                 finally:
@@ -234,15 +233,15 @@ class ClientWebSocketResponse:
                     self._waiting = None
                     set_result(waiter, True)
             except (asyncio.CancelledError, asyncio.TimeoutError):
-                self._close_code = 1006
+                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                 raise
             except EofStream:
-                self._close_code = 1000
+                self._close_code = WSCloseCode.OK
                 await self.close()
                 return WSMessage(WSMsgType.CLOSED, None, None)
             except ClientError:
                 self._closed = True
-                self._close_code = 1006
+                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                 return WS_CLOSED_MESSAGE
             except WebSocketError as exc:
                 self._close_code = exc.code
@@ -251,7 +250,7 @@ class ClientWebSocketResponse:
             except Exception as exc:
                 self._exception = exc
                 self._closing = True
-                self._close_code = 1006
+                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                 await self.close()
                 return WSMessage(WSMsgType.ERROR, exc, None)

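For illustration (not part of the patch): the magic close codes 1000/1006 above are replaced by the `WSCloseCode` IntEnum, which keeps call sites self-describing while staying numerically compatible. A sketch:

    from aiohttp import WSCloseCode

    # WSCloseCode is an IntEnum, so it compares equal to the raw RFC 6455 codes.
    assert WSCloseCode.OK == 1000
    assert WSCloseCode.ABNORMAL_CLOSURE == 1006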
@@ -274,13 +273,13 @@ class ClientWebSocketResponse:
         msg = await self.receive(timeout)
         if msg.type != WSMsgType.TEXT:
             raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
-        return msg.data
+        return cast(str, msg.data)

     async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
         msg = await self.receive(timeout)
         if msg.type != WSMsgType.BINARY:
             raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
-        return msg.data
+        return cast(bytes, msg.data)

     async def receive_json(
         self,
@@ -39,12 +39,20 @@ from .client_exceptions import (
     ClientHttpProxyError,
     ClientProxyConnectionError,
     ServerFingerprintMismatch,
+    UnixClientConnectorError,
     cert_errors,
     ssl_errors,
 )
 from .client_proto import ResponseHandler
 from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
-from .helpers import PY_36, CeilTimeout, get_running_loop, is_ip_address, noop, sentinel
+from .helpers import (
+    PY_36,
+    ceil_timeout,
+    get_running_loop,
+    is_ip_address,
+    noop,
+    sentinel,
+)
 from .http import RESPONSES
 from .locks import EventResultOrError
 from .resolver import DefaultResolver
@@ -54,8 +62,8 @@ try:

     SSLContext = ssl.SSLContext
 except ImportError:  # pragma: no cover
-    ssl = None  # type: ignore
-    SSLContext = object  # type: ignore
+    ssl = None  # type: ignore[assignment]
+    SSLContext = object  # type: ignore[misc,assignment]


 __all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
@@ -102,8 +110,8 @@ class Connection:
         self._key = key
         self._connector = connector
         self._loop = loop
-        self._protocol = protocol  # type: Optional[ResponseHandler]
-        self._callbacks = []  # type: List[Callable[[], None]]
+        self._protocol: Optional[ResponseHandler] = protocol
+        self._callbacks: List[Callable[[], None]] = []

         if loop.get_debug():
             self._source_traceback = traceback.extract_stack(sys._getframe(1))
@@ -178,7 +186,7 @@ class Connection:


 class _TransportPlaceholder:
-    """ placeholder for BaseConnector.connect function """
+    """placeholder for BaseConnector.connect function"""

     def close(self) -> None:
         pass
@@ -229,33 +237,31 @@ class BaseConnector:
         if loop.get_debug():
             self._source_traceback = traceback.extract_stack(sys._getframe(1))

-        self._conns = (
-            {}
-        )  # type: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]]
+        self._conns: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] = {}
         self._limit = limit
         self._limit_per_host = limit_per_host
-        self._acquired = set()  # type: Set[ResponseHandler]
-        self._acquired_per_host = defaultdict(
-            set
-        )  # type: DefaultDict[ConnectionKey, Set[ResponseHandler]]
+        self._acquired: Set[ResponseHandler] = set()
+        self._acquired_per_host: DefaultDict[
+            ConnectionKey, Set[ResponseHandler]
+        ] = defaultdict(set)
         self._keepalive_timeout = cast(float, keepalive_timeout)
         self._force_close = force_close

         # {host_key: FIFO list of waiters}
-        self._waiters = defaultdict(deque)  # type: ignore
+        self._waiters = defaultdict(deque)  # type: ignore[var-annotated]

         self._loop = loop
         self._factory = functools.partial(ResponseHandler, loop=loop)

-        self.cookies = SimpleCookie()  # type: SimpleCookie[str]
+        self.cookies: SimpleCookie[str] = SimpleCookie()

         # start keep-alive connection cleanup task
-        self._cleanup_handle = None
+        self._cleanup_handle: Optional[asyncio.TimerHandle] = None

         # start cleanup closed transports task
-        self._cleanup_closed_handle = None
+        self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None
         self._cleanup_closed_disabled = not enable_cleanup_closed
-        self._cleanup_closed_transports = []  # type: List[Optional[asyncio.Transport]]
+        self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = []
         self._cleanup_closed()

     def __del__(self, _warnings: Any = warnings) -> None:
@@ -284,14 +290,14 @@ class BaseConnector:

     def __enter__(self) -> "BaseConnector":
         warnings.warn(
-            '"witn Connector():" is deprecated, '
+            '"with Connector():" is deprecated, '
             'use "async with Connector():" instead',
             DeprecationWarning,
         )
         return self

     def __exit__(self, *exc: Any) -> None:
-        self.close()
+        self._close()

     async def __aenter__(self) -> "BaseConnector":
         return self
@@ -320,12 +326,10 @@ class BaseConnector:

     @property
     def limit_per_host(self) -> int:
-        """The limit_per_host for simultaneous connections
-        to the same endpoint.
+        """The limit for simultaneous connections to the same endpoint.

         Endpoints are the same if they are have equal
         (host, port, is_ssl) triple.

         """
         return self._limit_per_host

@@ -383,6 +387,7 @@ class BaseConnector:

     def _cleanup_closed(self) -> None:
         """Double confirmation for transport close.
+
         Some broken ssl servers may leave socket open without proper close.
         """
         if self._cleanup_closed_handle:
@@ -451,13 +456,13 @@ class BaseConnector:

     def _available_connections(self, key: "ConnectionKey") -> int:
         """
-        Return number of available connections taking into account
-        the limit, limit_per_host and the connection key.
+        Return number of available connections.

-        If it returns less than 1 means that there is no connections
-        availables.
+        The limit, limit_per_host and the connection key are taken into account.
+
+        If it returns less than 1 means that there are no connections
+        available.
         """

         if self._limit:
             # total calc available connections
             available = self._limit - len(self._acquired)
@@ -552,8 +557,14 @@ class BaseConnector:
                 await trace.send_connection_create_end()
         else:
             if traces:
+                # Acquire the connection to prevent race conditions with limits
+                placeholder = cast(ResponseHandler, _TransportPlaceholder())
+                self._acquired.add(placeholder)
+                self._acquired_per_host[key].add(placeholder)
                 for trace in traces:
                     await trace.send_connection_reuseconn()
+                self._acquired.remove(placeholder)
+                self._drop_acquired_per_host(key, placeholder)

         self._acquired.add(proto)
         self._acquired_per_host[key].add(proto)
@@ -592,7 +603,9 @@ class BaseConnector:

     def _release_waiter(self) -> None:
         """
-        Iterates over all waiters till found one that is not finsihed and
+        Iterates over all waiters until one to be released is found.
+
+        The one to be released is not finsihed and
         belongs to a host that has available connections.
         """
         if not self._waiters:
@@ -670,10 +683,8 @@ class BaseConnector:

 class _DNSCacheTable:
     def __init__(self, ttl: Optional[float] = None) -> None:
-        self._addrs_rr = (
-            {}
-        )  # type: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]]
-        self._timestamps = {}  # type: Dict[Tuple[str, int], float]
+        self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] = {}
+        self._timestamps: Dict[Tuple[str, int], float] = {}
         self._ttl = ttl

     def __contains__(self, host: object) -> bool:
@@ -769,9 +780,7 @@ class TCPConnector(BaseConnector):

         self._use_dns_cache = use_dns_cache
         self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
-        self._throttle_dns_events = (
-            {}
-        )  # type: Dict[Tuple[str, int], EventResultOrError]
+        self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {}
         self._family = family
         self._local_addr = local_addr

@@ -899,9 +908,11 @@ class TCPConnector(BaseConnector):
         if verified:
             return ssl.create_default_context()
         else:
-            sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+            sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
             sslcontext.options |= ssl.OP_NO_SSLv2
             sslcontext.options |= ssl.OP_NO_SSLv3
+            sslcontext.check_hostname = False
+            sslcontext.verify_mode = ssl.CERT_NONE
             try:
                 sslcontext.options |= ssl.OP_NO_COMPRESSION
             except AttributeError as attr_err:
|
|||
**kwargs: Any,
|
||||
) -> Tuple[asyncio.Transport, ResponseHandler]:
|
||||
try:
|
||||
with CeilTimeout(timeout.sock_connect):
|
||||
return await self._loop.create_connection(*args, **kwargs) # type: ignore # noqa
|
||||
async with ceil_timeout(timeout.sock_connect):
|
||||
return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa
|
||||
except cert_errors as exc:
|
||||
raise ClientConnectorCertificateError(req.connection_key, exc) from exc
|
||||
except ssl_errors as exc:
|
||||
raise ClientConnectorSSLError(req.connection_key, exc) from exc
|
||||
except OSError as exc:
|
||||
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
|
||||
raise
|
||||
raise client_error(req.connection_key, exc) from exc
|
||||
|
||||
def _fail_on_no_start_tls(self, req: "ClientRequest") -> None:
|
||||
"""Raise a :py:exc:`RuntimeError` on missing ``start_tls()``.
|
||||
|
||||
One case is that :py:meth:`asyncio.loop.start_tls` is not yet
|
||||
implemented under Python 3.6. It is necessary for TLS-in-TLS so
|
||||
that it is possible to send HTTPS queries through HTTPS proxies.
|
||||
|
||||
This doesn't affect regular HTTP requests, though.
|
||||
"""
|
||||
if not req.is_ssl():
|
||||
return
|
||||
|
||||
proxy_url = req.proxy
|
||||
assert proxy_url is not None
|
||||
if proxy_url.scheme != "https":
|
||||
return
|
||||
|
||||
self._check_loop_for_start_tls()
|
||||
|
||||
def _check_loop_for_start_tls(self) -> None:
|
||||
try:
|
||||
self._loop.start_tls
|
||||
except AttributeError as attr_exc:
|
||||
raise RuntimeError(
|
||||
"An HTTPS request is being sent through an HTTPS proxy. "
|
||||
"This needs support for TLS in TLS but it is not implemented "
|
||||
"in your runtime for the stdlib asyncio.\n\n"
|
||||
"Please upgrade to Python 3.7 or higher. For more details, "
|
||||
"please see:\n"
|
||||
"* https://bugs.python.org/issue37179\n"
|
||||
"* https://github.com/python/cpython/pull/28073\n"
|
||||
"* https://docs.aiohttp.org/en/stable/"
|
||||
"client_advanced.html#proxy-support\n"
|
||||
"* https://github.com/aio-libs/aiohttp/discussions/6044\n",
|
||||
) from attr_exc
|
||||
|
||||
def _loop_supports_start_tls(self) -> bool:
|
||||
try:
|
||||
self._check_loop_for_start_tls()
|
||||
except RuntimeError:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def _warn_about_tls_in_tls(
|
||||
self,
|
||||
underlying_transport: asyncio.Transport,
|
||||
req: "ClientRequest",
|
||||
) -> None:
|
||||
"""Issue a warning if the requested URL has HTTPS scheme."""
|
||||
if req.request_info.url.scheme != "https":
|
||||
return
|
||||
|
||||
asyncio_supports_tls_in_tls = getattr(
|
||||
underlying_transport,
|
||||
"_start_tls_compatible",
|
||||
False,
|
||||
)
|
||||
|
||||
if asyncio_supports_tls_in_tls:
|
||||
return
|
||||
|
||||
warnings.warn(
|
||||
"An HTTPS request is being sent through an HTTPS proxy. "
|
||||
"This support for TLS in TLS is known to be disabled "
|
||||
"in the stdlib asyncio. This is why you'll probably see "
|
||||
"an error in the log below.\n\n"
|
||||
"It is possible to enable it via monkeypatching under "
|
||||
"Python 3.7 or higher. For more details, see:\n"
|
||||
"* https://bugs.python.org/issue37179\n"
|
||||
"* https://github.com/python/cpython/pull/28073\n\n"
|
||||
"You can temporarily patch this as follows:\n"
|
||||
"* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
|
||||
"* https://github.com/aio-libs/aiohttp/discussions/6044\n",
|
||||
RuntimeWarning,
|
||||
source=self,
|
||||
# Why `4`? At least 3 of the calls in the stack originate
|
||||
# from the methods in this class.
|
||||
stacklevel=3,
|
||||
)
|
||||
|
||||
async def _start_tls_connection(
|
||||
self,
|
||||
underlying_transport: asyncio.Transport,
|
||||
req: "ClientRequest",
|
||||
timeout: "ClientTimeout",
|
||||
client_error: Type[Exception] = ClientConnectorError,
|
||||
) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
|
||||
"""Wrap the raw TCP transport with TLS."""
|
||||
tls_proto = self._factory() # Create a brand new proto for TLS
|
||||
|
||||
# Safety of the `cast()` call here is based on the fact that
|
||||
# internally `_get_ssl_context()` only returns `None` when
|
||||
# `req.is_ssl()` evaluates to `False` which is never gonna happen
|
||||
# in this code path. Of course, it's rather fragile
|
||||
# maintainability-wise but this is to be solved separately.
|
||||
sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req))
|
||||
|
||||
try:
|
||||
async with ceil_timeout(timeout.sock_connect):
|
||||
try:
|
||||
tls_transport = await self._loop.start_tls(
|
||||
underlying_transport,
|
||||
tls_proto,
|
||||
sslcontext,
|
||||
server_hostname=req.host,
|
||||
ssl_handshake_timeout=timeout.total,
|
||||
)
|
||||
except BaseException:
|
||||
# We need to close the underlying transport since
|
||||
# `start_tls()` probably failed before it had a
|
||||
# chance to do this:
|
||||
underlying_transport.close()
|
||||
raise
|
||||
except cert_errors as exc:
|
||||
raise ClientConnectorCertificateError(req.connection_key, exc) from exc
|
||||
except ssl_errors as exc:
|
||||
raise ClientConnectorSSLError(req.connection_key, exc) from exc
|
||||
except OSError as exc:
|
||||
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
|
||||
raise
|
||||
raise client_error(req.connection_key, exc) from exc
|
||||
except TypeError as type_err:
|
||||
# Example cause looks like this:
|
||||
# TypeError: transport <asyncio.sslproto._SSLProtocolTransport
|
||||
# object at 0x7f760615e460> is not supported by start_tls()
|
||||
|
||||
raise ClientConnectionError(
|
||||
"Cannot initialize a TLS-in-TLS connection to host "
|
||||
f"{req.host!s}:{req.port:d} through an underlying connection "
|
||||
f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
|
||||
f"[{type_err!s}]"
|
||||
) from type_err
|
||||
else:
|
||||
if tls_transport is None:
|
||||
msg = "Failed to start TLS (possibly caused by closing transport)"
|
||||
raise client_error(req.connection_key, OSError(msg))
|
||||
tls_proto.connection_made(
|
||||
tls_transport
|
||||
) # Kick the state machine of the new TLS protocol
|
||||
|
||||
return tls_transport, tls_proto
|
||||
|
||||
async def _create_direct_connection(
|
||||
self,
|
||||
req: "ClientRequest",
|
||||
|
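For illustration (not part of the patch): the synchronous CeilTimeout helper is replaced throughout by `aiohttp.helpers.ceil_timeout`, used as an async context manager (it builds on async_timeout and, as the name suggests, rounds the deadline up to reduce timer churn). A sketch of the calling pattern, with a placeholder coroutine standing in for the real I/O call:

    import asyncio
    from aiohttp.helpers import ceil_timeout

    async def fetch_with_deadline() -> None:
        async with ceil_timeout(5.0):  # raises asyncio.TimeoutError on expiry
            await asyncio.sleep(1)  # placeholder for the real I/O call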
@@ -1006,11 +1162,13 @@ class TCPConnector(BaseConnector):
             host_resolved.add_done_callback(drop_exception)
             raise
         except OSError as exc:
+            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+                raise
             # in case of proxy it is not ClientProxyConnectionError
             # it is problem of resolving proxy ip itself
             raise ClientConnectorError(req.connection_key, exc) from exc

-        last_exc = None  # type: Optional[Exception]
+        last_exc: Optional[Exception] = None

         for hinfo in hosts:
             host = hinfo["host"]
@@ -1052,10 +1210,13 @@ class TCPConnector(BaseConnector):

     async def _create_proxy_connection(
         self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
-    ) -> Tuple[asyncio.Transport, ResponseHandler]:
-        headers = {}  # type: Dict[str, str]
+    ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
+        self._fail_on_no_start_tls(req)
+        runtime_has_start_tls = self._loop_supports_start_tls()
+
+        headers: Dict[str, str] = {}
         if req.proxy_headers is not None:
-            headers = req.proxy_headers  # type: ignore
+            headers = req.proxy_headers  # type: ignore[assignment]
             headers[hdrs.HOST] = req.headers[hdrs.HOST]

         url = req.proxy
@@ -1087,7 +1248,9 @@ class TCPConnector(BaseConnector):
             proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth

         if req.is_ssl():
-            sslcontext = self._get_ssl_context(req)
+            if runtime_has_start_tls:
+                self._warn_about_tls_in_tls(transport, req)
+
             # For HTTPS requests over HTTP proxy
             # we must notify proxy to tunnel connection
             # so we send CONNECT command:
@@ -1107,7 +1270,11 @@ class TCPConnector(BaseConnector):
             try:
                 protocol = conn._protocol
                 assert protocol is not None
-                protocol.set_response_params()
+
+                # read_until_eof=True will ensure the connection isn't closed
+                # once the response is received and processed allowing
+                # START_TLS to work on the connection below.
+                protocol.set_response_params(read_until_eof=runtime_has_start_tls)
                 resp = await proxy_resp.start(conn)
             except BaseException:
                 proxy_resp.close()
@@ -1128,21 +1295,42 @@ class TCPConnector(BaseConnector):
                             message=message,
                             headers=resp.headers,
                         )
-                    rawsock = transport.get_extra_info("socket", default=None)
-                    if rawsock is None:
-                        raise RuntimeError("Transport does not expose socket instance")
-                    # Duplicate the socket, so now we can close proxy transport
-                    rawsock = rawsock.dup()
-                finally:
-                    transport.close()
-
-                transport, proto = await self._wrap_create_connection(
-                    self._factory,
-                    timeout=timeout,
-                    ssl=sslcontext,
-                    sock=rawsock,
-                    server_hostname=req.host,
-                    req=req,
-                )
+                    if not runtime_has_start_tls:
+                        rawsock = transport.get_extra_info("socket", default=None)
+                        if rawsock is None:
+                            raise RuntimeError(
+                                "Transport does not expose socket instance"
+                            )
+                        # Duplicate the socket, so now we can close proxy transport
+                        rawsock = rawsock.dup()
+                except BaseException:
+                    # It shouldn't be closed in `finally` because it's fed to
+                    # `loop.start_tls()` and the docs say not to touch it after
+                    # passing there.
+                    transport.close()
+                    raise
+                finally:
+                    if not runtime_has_start_tls:
+                        transport.close()
+
+                if not runtime_has_start_tls:
+                    # HTTP proxy with support for upgrade to HTTPS
+                    sslcontext = self._get_ssl_context(req)
+                    return await self._wrap_create_connection(
+                        self._factory,
+                        timeout=timeout,
+                        ssl=sslcontext,
+                        sock=rawsock,
+                        server_hostname=req.host,
+                        req=req,
+                    )
+
+                return await self._start_tls_connection(
+                    # Access the old transport for the last time before it's
+                    # closed and forgotten forever:
+                    transport,
+                    req=req,
+                    timeout=timeout,
+                )
             finally:
                 proxy_resp.close()
@@ -1189,12 +1377,14 @@ class UnixConnector(BaseConnector):
         self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
     ) -> ResponseHandler:
         try:
-            with CeilTimeout(timeout.sock_connect):
+            async with ceil_timeout(timeout.sock_connect):
                 _, proto = await self._loop.create_unix_connection(
                     self._factory, self._path
                 )
         except OSError as exc:
-            raise ClientConnectorError(req.connection_key, exc) from exc
+            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+                raise
+            raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc

         return cast(ResponseHandler, proto)

@@ -1230,7 +1420,9 @@ class NamedPipeConnector(BaseConnector):
             limit_per_host=limit_per_host,
             loop=loop,
         )
-        if not isinstance(self._loop, asyncio.ProactorEventLoop):  # type: ignore
+        if not isinstance(
+            self._loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
+        ):
             raise RuntimeError(
                 "Named Pipes only available in proactor " "loop under windows"
             )
@@ -1245,8 +1437,8 @@ class NamedPipeConnector(BaseConnector):
         self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
     ) -> ResponseHandler:
         try:
-            with CeilTimeout(timeout.sock_connect):
-                _, proto = await self._loop.create_pipe_connection(  # type: ignore
+            async with ceil_timeout(timeout.sock_connect):
+                _, proto = await self._loop.create_pipe_connection(  # type: ignore[attr-defined]  # noqa: E501
                     self._factory, self._path
                 )
             # the drain is required so that the connection_made is called
@@ -1257,6 +1449,8 @@ class NamedPipeConnector(BaseConnector):
             # other option is to manually set transport like
             # `proto.transport = trans`
         except OSError as exc:
+            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+                raise
             raise ClientConnectorError(req.connection_key, exc) from exc

         return cast(ResponseHandler, proto)

@@ -1,4 +1,5 @@
 import asyncio
+import contextlib
 import datetime
 import os  # noqa
 import pathlib
@@ -11,6 +12,7 @@ from typing import (  # noqa
     Dict,
     Iterable,
     Iterator,
+    List,
     Mapping,
     Optional,
     Set,
@@ -21,9 +23,9 @@ from typing import (  # noqa

 from yarl import URL

-from .abc import AbstractCookieJar
+from .abc import AbstractCookieJar, ClearCookiePredicate
 from .helpers import is_ip_address, next_whole_second
-from .typedefs import LooseCookies, PathLike
+from .typedefs import LooseCookies, PathLike, StrOrURL

 __all__ = ("CookieJar", "DummyCookieJar")

@@ -52,24 +54,37 @@ class CookieJar(AbstractCookieJar):

     MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)

-    MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2 ** 31 - 1)
+    MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2**31 - 1)

     def __init__(
         self,
         *,
         unsafe: bool = False,
         quote_cookie: bool = True,
-        loop: Optional[asyncio.AbstractEventLoop] = None
+        treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
+        loop: Optional[asyncio.AbstractEventLoop] = None,
     ) -> None:
         super().__init__(loop=loop)
-        self._cookies = defaultdict(
+        self._cookies: DefaultDict[Tuple[str, str], SimpleCookie[str]] = defaultdict(
             SimpleCookie
-        )  # type: DefaultDict[str, SimpleCookie[str]]
-        self._host_only_cookies = set()  # type: Set[Tuple[str, str]]
+        )
+        self._host_only_cookies: Set[Tuple[str, str]] = set()
         self._unsafe = unsafe
         self._quote_cookie = quote_cookie
+        if treat_as_secure_origin is None:
+            treat_as_secure_origin = []
+        elif isinstance(treat_as_secure_origin, URL):
+            treat_as_secure_origin = [treat_as_secure_origin.origin()]
+        elif isinstance(treat_as_secure_origin, str):
+            treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
+        else:
+            treat_as_secure_origin = [
+                URL(url).origin() if isinstance(url, str) else url.origin()
+                for url in treat_as_secure_origin
+            ]
+        self._treat_as_secure_origin = treat_as_secure_origin
         self._next_expiration = next_whole_second()
-        self._expirations = {}  # type: Dict[Tuple[str, str], datetime.datetime]
+        self._expirations: Dict[Tuple[str, str, str], datetime.datetime] = {}
         # #4515: datetime.max may not be representable on 32-bit platforms
         self._max_time = self.MAX_TIME
         try:
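For illustration (not part of the patch): `treat_as_secure_origin` lets a jar accept Secure cookies from explicitly trusted plain-HTTP origins; the normalization above reduces whatever was passed to a list of URL origins. A sketch:

    from aiohttp import CookieJar

    # A str, a URL, or a list of either is accepted; everything is
    # normalized to origin form (scheme + host + port).
    jar = CookieJar(treat_as_secure_origin="http://127.0.0.1:8080")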
@ -87,11 +102,41 @@ class CookieJar(AbstractCookieJar):
|
|||
with file_path.open(mode="rb") as f:
|
||||
self._cookies = pickle.load(f)
|
||||
|
||||
def clear(self) -> None:
|
||||
self._cookies.clear()
|
||||
self._host_only_cookies.clear()
|
||||
self._next_expiration = next_whole_second()
|
||||
self._expirations.clear()
|
||||
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
|
||||
if predicate is None:
|
||||
self._next_expiration = next_whole_second()
|
||||
self._cookies.clear()
|
||||
self._host_only_cookies.clear()
|
||||
self._expirations.clear()
|
||||
return
|
||||
|
||||
to_del = []
|
||||
now = datetime.datetime.now(datetime.timezone.utc)
|
||||
for (domain, path), cookie in self._cookies.items():
|
||||
for name, morsel in cookie.items():
|
||||
key = (domain, path, name)
|
||||
if (
|
||||
key in self._expirations and self._expirations[key] <= now
|
||||
) or predicate(morsel):
|
||||
to_del.append(key)
|
||||
|
||||
for domain, path, name in to_del:
|
||||
self._host_only_cookies.discard((domain, name))
|
||||
key = (domain, path, name)
|
||||
if key in self._expirations:
|
||||
del self._expirations[(domain, path, name)]
|
||||
self._cookies[(domain, path)].pop(name, None)
|
||||
|
||||
next_expiration = min(self._expirations.values(), default=self._max_time)
|
||||
try:
|
||||
self._next_expiration = next_expiration.replace(
|
||||
microsecond=0
|
||||
) + datetime.timedelta(seconds=1)
|
||||
except OverflowError:
|
||||
self._next_expiration = self._max_time
|
||||
|
||||
def clear_domain(self, domain: str) -> None:
|
||||
self.clear(lambda x: self._is_domain_match(domain, x["domain"]))
|
||||
|
||||
def __iter__(self) -> "Iterator[Morsel[str]]":
|
||||
self._do_expiration()
|
||||
|
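
Usage sketch (illustration, not part of the vendored change; the origin and
domain below are made up): the predicate-based clear() and the new
treat_as_secure_origin option added above can be exercised like this against
the public aiohttp 3.8 API:

    import asyncio
    from aiohttp import CookieJar

    async def main() -> None:
        # Treat a plain-HTTP dev origin as secure so its "Secure" cookies
        # are still stored and sent.
        jar = CookieJar(treat_as_secure_origin=["http://localhost:8080"])

        # Drop only session cookies (no explicit expiry); the rest stay.
        jar.clear(lambda morsel: not morsel["expires"] and not morsel["max-age"])

        # clear_domain() is now a thin wrapper over the same predicate API.
        jar.clear_domain("example.com")

    asyncio.run(main())
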
@@ -102,35 +147,13 @@ class CookieJar(AbstractCookieJar):
         return sum(1 for i in self)
 
     def _do_expiration(self) -> None:
-        now = datetime.datetime.now(datetime.timezone.utc)
-        if self._next_expiration > now:
-            return
-        if not self._expirations:
-            return
-        next_expiration = self._max_time
-        to_del = []
-        cookies = self._cookies
-        expirations = self._expirations
-        for (domain, name), when in expirations.items():
-            if when <= now:
-                cookies[domain].pop(name, None)
-                to_del.append((domain, name))
-                self._host_only_cookies.discard((domain, name))
-            else:
-                next_expiration = min(next_expiration, when)
-        for key in to_del:
-            del expirations[key]
+        self.clear(lambda x: False)
 
-        try:
-            self._next_expiration = next_expiration.replace(
-                microsecond=0
-            ) + datetime.timedelta(seconds=1)
-        except OverflowError:
-            self._next_expiration = self._max_time
-
-    def _expire_cookie(self, when: datetime.datetime, domain: str, name: str) -> None:
+    def _expire_cookie(
+        self, when: datetime.datetime, domain: str, path: str, name: str
+    ) -> None:
         self._next_expiration = min(self._next_expiration, when)
-        self._expirations[(domain, name)] = when
+        self._expirations[(domain, path, name)] = when
 
     def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
         """Update cookies."""
@@ -145,8 +168,8 @@ class CookieJar(AbstractCookieJar):
 
         for name, cookie in cookies:
             if not isinstance(cookie, Morsel):
-                tmp = SimpleCookie()  # type: SimpleCookie[str]
-                tmp[name] = cookie  # type: ignore
+                tmp: SimpleCookie[str] = SimpleCookie()
+                tmp[name] = cookie  # type: ignore[assignment]
                 cookie = tmp[name]
 
             domain = cookie["domain"]
@@ -192,7 +215,7 @@ class CookieJar(AbstractCookieJar):
                     ) + datetime.timedelta(seconds=delta_seconds)
                 except OverflowError:
                     max_age_expiration = self._max_time
-                self._expire_cookie(max_age_expiration, domain, name)
+                self._expire_cookie(max_age_expiration, domain, path, name)
             except ValueError:
                 cookie["max-age"] = ""
 
@@ -201,11 +224,11 @@ class CookieJar(AbstractCookieJar):
             if expires:
                 expire_time = self._parse_date(expires)
                 if expire_time:
-                    self._expire_cookie(expire_time, domain, name)
+                    self._expire_cookie(expire_time, domain, path, name)
                 else:
                     cookie["expires"] = ""
 
-        self._cookies[domain][name] = cookie
+        self._cookies[(domain, path)][name] = cookie
 
         self._do_expiration()
 
@@ -219,7 +242,14 @@ class CookieJar(AbstractCookieJar):
             SimpleCookie() if self._quote_cookie else BaseCookie()
         )
         hostname = request_url.raw_host or ""
-        is_not_secure = request_url.scheme not in ("https", "wss")
+        request_origin = URL()
+        with contextlib.suppress(ValueError):
+            request_origin = request_url.origin()
+
+        is_not_secure = (
+            request_url.scheme not in ("https", "wss")
+            and request_origin not in self._treat_as_secure_origin
+        )
 
         for cookie in self:
             name = cookie.key
@@ -312,7 +342,7 @@ class CookieJar(AbstractCookieJar):
             time_match = cls.DATE_HMS_TIME_RE.match(token)
             if time_match:
                 found_time = True
-                hour, minute, second = [int(s) for s in time_match.groups()]
+                hour, minute, second = (int(s) for s in time_match.groups())
                 continue
 
             if not found_day:
@@ -372,7 +402,10 @@ class DummyCookieJar(AbstractCookieJar):
     def __len__(self) -> int:
         return 0
 
-    def clear(self) -> None:
+    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
+        pass
+
+    def clear_domain(self, domain: str) -> None:
         pass
 
     def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
@@ -12,8 +12,10 @@ __all__ = ("FormData",)
 
 
 class FormData:
-    """Helper class for multipart/form-data and
-    application/x-www-form-urlencoded body generation."""
+    """Helper class for form body generation.
+
+    Supports multipart/form-data and application/x-www-form-urlencoded.
+    """
 
     def __init__(
         self,
@@ -22,7 +24,7 @@ class FormData:
         charset: Optional[str] = None,
     ) -> None:
         self._writer = multipart.MultipartWriter("form-data")
-        self._fields = []  # type: List[Any]
+        self._fields: List[Any] = []
         self._is_multipart = False
         self._is_processed = False
         self._quote_fields = quote_fields
@@ -45,7 +47,7 @@ class FormData:
         *,
         content_type: Optional[str] = None,
         filename: Optional[str] = None,
-        content_transfer_encoding: Optional[str] = None
+        content_transfer_encoding: Optional[str] = None,
     ) -> None:
 
         if isinstance(value, io.IOBase):
@@ -54,7 +56,7 @@ class FormData:
         if filename is None and content_transfer_encoding is None:
             filename = name
 
-        type_options = MultiDict({"name": name})  # type: MultiDict[str]
+        type_options: MultiDict[str] = MultiDict({"name": name})
         if filename is not None and not isinstance(filename, str):
             raise TypeError(
                 "filename must be an instance of str. " "Got: %s" % filename
@@ -92,14 +94,14 @@ class FormData:
 
             if isinstance(rec, io.IOBase):
                 k = guess_filename(rec, "unknown")
-                self.add_field(k, rec)  # type: ignore
+                self.add_field(k, rec)  # type: ignore[arg-type]
 
             elif isinstance(rec, (MultiDictProxy, MultiDict)):
                 to_add.extend(rec.items())
 
             elif isinstance(rec, (list, tuple)) and len(rec) == 2:
                 k, fp = rec
-                self.add_field(k, fp)  # type: ignore
+                self.add_field(k, fp)  # type: ignore[arg-type]
 
             else:
                 raise TypeError(
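
FormData picks its encoding from the fields added: plain string fields stay
application/x-www-form-urlencoded, while a file-like value switches the
writer to multipart/form-data. A short sketch (not part of the diff; the
endpoint URL is made up):

    import aiohttp

    async def upload(session: aiohttp.ClientSession) -> None:
        simple = aiohttp.FormData()
        simple.add_field("user", "alice")  # stays urlencoded

        multi = aiohttp.FormData()
        multi.add_field(
            "report",
            open("report.csv", "rb"),  # file object forces multipart
            filename="report.csv",
            content_type="text/csv",
        )
        await session.post("https://example.test/upload", data=multi)
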
@@ -2,21 +2,28 @@
 
 # After changing the file content call ./tools/gen.py
 # to regenerate the headers parser
+import sys
+from typing import Set
+
 from multidict import istr
 
-METH_ANY = "*"
-METH_CONNECT = "CONNECT"
-METH_HEAD = "HEAD"
-METH_GET = "GET"
-METH_DELETE = "DELETE"
-METH_OPTIONS = "OPTIONS"
-METH_PATCH = "PATCH"
-METH_POST = "POST"
-METH_PUT = "PUT"
-METH_TRACE = "TRACE"
+if sys.version_info >= (3, 8):
+    from typing import Final
+else:
+    from typing_extensions import Final
 
-METH_ALL = {
+METH_ANY: Final[str] = "*"
+METH_CONNECT: Final[str] = "CONNECT"
+METH_HEAD: Final[str] = "HEAD"
+METH_GET: Final[str] = "GET"
+METH_DELETE: Final[str] = "DELETE"
+METH_OPTIONS: Final[str] = "OPTIONS"
+METH_PATCH: Final[str] = "PATCH"
+METH_POST: Final[str] = "POST"
+METH_PUT: Final[str] = "PUT"
+METH_TRACE: Final[str] = "TRACE"
+
+METH_ALL: Final[Set[str]] = {
     METH_CONNECT,
     METH_HEAD,
     METH_GET,
@@ -28,81 +35,80 @@ METH_ALL = {
     METH_TRACE,
 }
 
 
-ACCEPT = istr("Accept")
-ACCEPT_CHARSET = istr("Accept-Charset")
-ACCEPT_ENCODING = istr("Accept-Encoding")
-ACCEPT_LANGUAGE = istr("Accept-Language")
-ACCEPT_RANGES = istr("Accept-Ranges")
-ACCESS_CONTROL_MAX_AGE = istr("Access-Control-Max-Age")
-ACCESS_CONTROL_ALLOW_CREDENTIALS = istr("Access-Control-Allow-Credentials")
-ACCESS_CONTROL_ALLOW_HEADERS = istr("Access-Control-Allow-Headers")
-ACCESS_CONTROL_ALLOW_METHODS = istr("Access-Control-Allow-Methods")
-ACCESS_CONTROL_ALLOW_ORIGIN = istr("Access-Control-Allow-Origin")
-ACCESS_CONTROL_EXPOSE_HEADERS = istr("Access-Control-Expose-Headers")
-ACCESS_CONTROL_REQUEST_HEADERS = istr("Access-Control-Request-Headers")
-ACCESS_CONTROL_REQUEST_METHOD = istr("Access-Control-Request-Method")
-AGE = istr("Age")
-ALLOW = istr("Allow")
-AUTHORIZATION = istr("Authorization")
-CACHE_CONTROL = istr("Cache-Control")
-CONNECTION = istr("Connection")
-CONTENT_DISPOSITION = istr("Content-Disposition")
-CONTENT_ENCODING = istr("Content-Encoding")
-CONTENT_LANGUAGE = istr("Content-Language")
-CONTENT_LENGTH = istr("Content-Length")
-CONTENT_LOCATION = istr("Content-Location")
-CONTENT_MD5 = istr("Content-MD5")
-CONTENT_RANGE = istr("Content-Range")
-CONTENT_TRANSFER_ENCODING = istr("Content-Transfer-Encoding")
-CONTENT_TYPE = istr("Content-Type")
-COOKIE = istr("Cookie")
-DATE = istr("Date")
-DESTINATION = istr("Destination")
-DIGEST = istr("Digest")
-ETAG = istr("Etag")
-EXPECT = istr("Expect")
-EXPIRES = istr("Expires")
-FORWARDED = istr("Forwarded")
-FROM = istr("From")
-HOST = istr("Host")
-IF_MATCH = istr("If-Match")
-IF_MODIFIED_SINCE = istr("If-Modified-Since")
-IF_NONE_MATCH = istr("If-None-Match")
-IF_RANGE = istr("If-Range")
-IF_UNMODIFIED_SINCE = istr("If-Unmodified-Since")
-KEEP_ALIVE = istr("Keep-Alive")
-LAST_EVENT_ID = istr("Last-Event-ID")
-LAST_MODIFIED = istr("Last-Modified")
-LINK = istr("Link")
-LOCATION = istr("Location")
-MAX_FORWARDS = istr("Max-Forwards")
-ORIGIN = istr("Origin")
-PRAGMA = istr("Pragma")
-PROXY_AUTHENTICATE = istr("Proxy-Authenticate")
-PROXY_AUTHORIZATION = istr("Proxy-Authorization")
-RANGE = istr("Range")
-REFERER = istr("Referer")
-RETRY_AFTER = istr("Retry-After")
-SEC_WEBSOCKET_ACCEPT = istr("Sec-WebSocket-Accept")
-SEC_WEBSOCKET_VERSION = istr("Sec-WebSocket-Version")
-SEC_WEBSOCKET_PROTOCOL = istr("Sec-WebSocket-Protocol")
-SEC_WEBSOCKET_EXTENSIONS = istr("Sec-WebSocket-Extensions")
-SEC_WEBSOCKET_KEY = istr("Sec-WebSocket-Key")
-SEC_WEBSOCKET_KEY1 = istr("Sec-WebSocket-Key1")
-SERVER = istr("Server")
-SET_COOKIE = istr("Set-Cookie")
-TE = istr("TE")
-TRAILER = istr("Trailer")
-TRANSFER_ENCODING = istr("Transfer-Encoding")
-UPGRADE = istr("Upgrade")
-URI = istr("URI")
-USER_AGENT = istr("User-Agent")
-VARY = istr("Vary")
-VIA = istr("Via")
-WANT_DIGEST = istr("Want-Digest")
-WARNING = istr("Warning")
-WWW_AUTHENTICATE = istr("WWW-Authenticate")
-X_FORWARDED_FOR = istr("X-Forwarded-For")
-X_FORWARDED_HOST = istr("X-Forwarded-Host")
-X_FORWARDED_PROTO = istr("X-Forwarded-Proto")
+ACCEPT: Final[istr] = istr("Accept")
+ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
+ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
+ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
+ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
+ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
+ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
+ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
+ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
+ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
+ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
+ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
+ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
+AGE: Final[istr] = istr("Age")
+ALLOW: Final[istr] = istr("Allow")
+AUTHORIZATION: Final[istr] = istr("Authorization")
+CACHE_CONTROL: Final[istr] = istr("Cache-Control")
+CONNECTION: Final[istr] = istr("Connection")
+CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
+CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
+CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
+CONTENT_LENGTH: Final[istr] = istr("Content-Length")
+CONTENT_LOCATION: Final[istr] = istr("Content-Location")
+CONTENT_MD5: Final[istr] = istr("Content-MD5")
+CONTENT_RANGE: Final[istr] = istr("Content-Range")
+CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
+CONTENT_TYPE: Final[istr] = istr("Content-Type")
+COOKIE: Final[istr] = istr("Cookie")
+DATE: Final[istr] = istr("Date")
+DESTINATION: Final[istr] = istr("Destination")
+DIGEST: Final[istr] = istr("Digest")
+ETAG: Final[istr] = istr("Etag")
+EXPECT: Final[istr] = istr("Expect")
+EXPIRES: Final[istr] = istr("Expires")
+FORWARDED: Final[istr] = istr("Forwarded")
+FROM: Final[istr] = istr("From")
+HOST: Final[istr] = istr("Host")
+IF_MATCH: Final[istr] = istr("If-Match")
+IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
+IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
+IF_RANGE: Final[istr] = istr("If-Range")
+IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
+KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
+LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
+LAST_MODIFIED: Final[istr] = istr("Last-Modified")
+LINK: Final[istr] = istr("Link")
+LOCATION: Final[istr] = istr("Location")
+MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
+ORIGIN: Final[istr] = istr("Origin")
+PRAGMA: Final[istr] = istr("Pragma")
+PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
+PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
+RANGE: Final[istr] = istr("Range")
+REFERER: Final[istr] = istr("Referer")
+RETRY_AFTER: Final[istr] = istr("Retry-After")
+SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
+SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
+SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
+SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
+SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
+SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
+SERVER: Final[istr] = istr("Server")
+SET_COOKIE: Final[istr] = istr("Set-Cookie")
+TE: Final[istr] = istr("TE")
+TRAILER: Final[istr] = istr("Trailer")
+TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
+UPGRADE: Final[istr] = istr("Upgrade")
+URI: Final[istr] = istr("URI")
+USER_AGENT: Final[istr] = istr("User-Agent")
+VARY: Final[istr] = istr("Vary")
+VIA: Final[istr] = istr("Via")
+WANT_DIGEST: Final[istr] = istr("Want-Digest")
+WARNING: Final[istr] = istr("Warning")
+WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
+X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
+X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
+X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
@@ -3,7 +3,6 @@
 import asyncio
 import base64
 import binascii
-import cgi
 import datetime
 import functools
 import inspect
@@ -17,12 +16,15 @@ import warnings
 import weakref
 from collections import namedtuple
 from contextlib import suppress
+from email.parser import HeaderParser
+from email.utils import parsedate
 from math import ceil
 from pathlib import Path
 from types import TracebackType
 from typing import (
     Any,
     Callable,
+    ContextManager,
     Dict,
     Generator,
     Generic,
@@ -40,58 +42,55 @@ from typing import (
     cast,
 )
 from urllib.parse import quote
-from urllib.request import getproxies
+from urllib.request import getproxies, proxy_bypass
 
 import async_timeout
 import attr
 from multidict import MultiDict, MultiDictProxy
-from typing_extensions import Protocol
 from yarl import URL
 
 from . import hdrs
 from .log import client_logger, internal_logger
-from .typedefs import PathLike  # noqa
+from .typedefs import PathLike, Protocol  # noqa
 
-__all__ = ("BasicAuth", "ChainMapProxy")
+__all__ = ("BasicAuth", "ChainMapProxy", "ETag")
 
 IS_MACOS = platform.system() == "Darwin"
 IS_WINDOWS = platform.system() == "Windows"
 
 PY_36 = sys.version_info >= (3, 6)
 PY_37 = sys.version_info >= (3, 7)
 PY_38 = sys.version_info >= (3, 8)
 PY_310 = sys.version_info >= (3, 10)
+PY_311 = sys.version_info >= (3, 11)
 
-if not PY_37:
+if sys.version_info < (3, 7):
     import idna_ssl
 
     idna_ssl.patch_match_hostname()
 
-try:
-    from typing import ContextManager
-except ImportError:
-    from typing_extensions import ContextManager
+    def all_tasks(
+        loop: Optional[asyncio.AbstractEventLoop] = None,
+    ) -> Set["asyncio.Task[Any]"]:
+        tasks = list(asyncio.Task.all_tasks(loop))
+        return {t for t in tasks if not t.done()}
 
 
-def all_tasks(
-    loop: Optional[asyncio.AbstractEventLoop] = None,
-) -> Set["asyncio.Task[Any]"]:
-    tasks = list(asyncio.Task.all_tasks(loop))
-    return {t for t in tasks if not t.done()}
-
-
-if PY_37:
-    all_tasks = getattr(asyncio, "all_tasks")
+else:
+    all_tasks = asyncio.all_tasks
 
 
 _T = TypeVar("_T")
 _S = TypeVar("_S")
 
 
-sentinel = object()  # type: Any
-NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))  # type: bool
+sentinel: Any = object()
+NO_EXTENSIONS: bool = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))
 
 # N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr
 # for compatibility with older versions
-DEBUG = getattr(sys.flags, "dev_mode", False) or (
+DEBUG: bool = getattr(sys.flags, "dev_mode", False) or (
     not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
-)  # type: bool
+)
 
 
 CHAR = {chr(i) for i in range(0, 128)}
@@ -197,7 +196,9 @@ def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
 
 
 def netrc_from_env() -> Optional[netrc.netrc]:
-    """Attempt to load the netrc file from the path specified by the env-var
+    """Load netrc from file.
+
+    Attempt to load it from the path specified by the env-var
     NETRC or in the default location in the user's home directory.
 
     Returns None if it couldn't be found or fails to parse.
@@ -218,9 +219,7 @@ def netrc_from_env() -> Optional[netrc.netrc]:
         )
         return None
 
-    netrc_path = home_dir / (
-        "_netrc" if platform.system() == "Windows" else ".netrc"
-    )
+    netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")
 
     try:
         return netrc.netrc(str(netrc_path))
@@ -243,14 +242,20 @@ class ProxyInfo:
 
 
 def proxies_from_env() -> Dict[str, ProxyInfo]:
-    proxy_urls = {k: URL(v) for k, v in getproxies().items() if k in ("http", "https")}
+    proxy_urls = {
+        k: URL(v)
+        for k, v in getproxies().items()
+        if k in ("http", "https", "ws", "wss")
+    }
     netrc_obj = netrc_from_env()
     stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
     for proto, val in stripped.items():
         proxy, auth = val
-        if proxy.scheme == "https":
-            client_logger.warning("HTTPS proxies %s are not supported, ignoring", proxy)
+        if proxy.scheme in ("https", "wss"):
+            client_logger.warning(
+                "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
+            )
             continue
         if netrc_obj and auth is None:
             auth_from_netrc = None
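
proxies_from_env() now also honors ws/wss proxy variables and skips
TLS-scheme proxies with a warning instead of only checking "https". A
sketch of the helper's behavior (not part of the diff; the addresses and
environment variables are hypothetical):

    import os
    from aiohttp.helpers import proxies_from_env

    os.environ["http_proxy"] = "http://user:pass@127.0.0.1:3128"
    os.environ["ws_proxy"] = "http://127.0.0.1:3129"

    for proto, info in proxies_from_env().items():
        # Credentials are stripped off the URL into info.proxy_auth.
        print(proto, info.proxy, info.proxy_auth)
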
@@ -270,7 +275,7 @@ def proxies_from_env() -> Dict[str, ProxyInfo]:
 def current_task(
     loop: Optional[asyncio.AbstractEventLoop] = None,
 ) -> "Optional[asyncio.Task[Any]]":
-    if PY_37:
+    if sys.version_info >= (3, 7):
         return asyncio.current_task(loop=loop)
     else:
         return asyncio.Task.current_task(loop=loop)
@@ -297,11 +302,25 @@ def get_running_loop(
 def isasyncgenfunction(obj: Any) -> bool:
     func = getattr(inspect, "isasyncgenfunction", None)
     if func is not None:
-        return func(obj)
+        return func(obj)  # type: ignore[no-any-return]
     else:
         return False
 
 
+def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
+    """Get a permitted proxy for the given URL from the env."""
+    if url.host is not None and proxy_bypass(url.host):
+        raise LookupError(f"Proxying is disallowed for `{url.host!r}`")
+
+    proxies_in_env = proxies_from_env()
+    try:
+        proxy_info = proxies_in_env[url.scheme]
+    except KeyError:
+        raise LookupError(f"No proxies found for `{url!s}` in the env")
+    else:
+        return proxy_info.proxy, proxy_info.proxy_auth
+
+
 @attr.s(auto_attribs=True, frozen=True, slots=True)
 class MimeType:
     type: str
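
get_env_proxy_for_url() folds proxy_bypass() and proxies_from_env() into a
single lookup that either returns (proxy, auth) or raises LookupError. A
minimal sketch (not part of the diff):

    from yarl import URL
    from aiohttp.helpers import get_env_proxy_for_url

    try:
        proxy, proxy_auth = get_env_proxy_for_url(URL("http://example.com/"))
    except LookupError:
        proxy, proxy_auth = None, None  # no permitted proxy: connect directly

    # proxy/proxy_auth can be passed straight to ClientSession.request().
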
@@ -331,7 +350,7 @@ def parse_mimetype(mimetype: str) -> MimeType:
     )
 
     parts = mimetype.split(";")
-    params = MultiDict()  # type: MultiDict[str]
+    params: MultiDict[str] = MultiDict()
     for item in parts[1:]:
         if not item:
             continue
@@ -365,14 +384,41 @@ def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
     return default
 
 
+not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
+QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}
+
+
+def quoted_string(content: str) -> str:
+    """Return 7-bit content as quoted-string.
+
+    Format content into a quoted-string as defined in RFC5322 for
+    Internet Message Format. Notice that this is not the 8-bit HTTP
+    format, but the 7-bit email format. Content must be in usascii or
+    a ValueError is raised.
+    """
+    if not (QCONTENT > set(content)):
+        raise ValueError(f"bad content for quoted-string {content!r}")
+    return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)
+
+
 def content_disposition_header(
-    disptype: str, quote_fields: bool = True, **params: str
+    disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
 ) -> str:
-    """Sets ``Content-Disposition`` header.
+    """Sets ``Content-Disposition`` header for MIME.
+
+    This is the MIME payload Content-Disposition header from RFC 2183
+    and RFC 7579 section 4.2, not the HTTP Content-Disposition from
+    RFC 6266.
 
     disptype is a disposition type: inline, attachment, form-data.
     Should be valid extension token (see RFC 2183)
 
+    quote_fields performs value quoting to 7-bit MIME headers
+    according to RFC 7578. Set to quote_fields to False if recipient
+    can take 8-bit file names and field values.
+
+    _charset specifies the charset to use when quote_fields is True.
+
     params is a dict with disposition params.
     """
     if not disptype or not (TOKEN > set(disptype)):
@@ -386,26 +432,40 @@ def content_disposition_header(
             raise ValueError(
                 "bad content disposition parameter" " {!r}={!r}".format(key, val)
             )
-        qval = quote(val, "") if quote_fields else val
-        lparams.append((key, '"%s"' % qval))
-        if key == "filename":
-            lparams.append(("filename*", "utf-8''" + qval))
+        if quote_fields:
+            if key.lower() == "filename":
+                qval = quote(val, "", encoding=_charset)
+                lparams.append((key, '"%s"' % qval))
+            else:
+                try:
+                    qval = quoted_string(val)
+                except ValueError:
+                    qval = "".join(
+                        (_charset, "''", quote(val, "", encoding=_charset))
+                    )
+                    lparams.append((key + "*", qval))
+                else:
+                    lparams.append((key, '"%s"' % qval))
+        else:
+            qval = val.replace("\\", "\\\\").replace('"', '\\"')
+            lparams.append((key, '"%s"' % qval))
     sparams = "; ".join("=".join(pair) for pair in lparams)
     value = "; ".join((value, sparams))
     return value
 
 
-class _TSelf(Protocol):
-    _cache: Dict[str, Any]
+class _TSelf(Protocol, Generic[_T]):
+    _cache: Dict[str, _T]
 
 
 class reify(Generic[_T]):
-    """Use as a class method decorator. It operates almost exactly like
+    """Use as a class method decorator.
+
+    It operates almost exactly like
     the Python `@property` decorator, but it puts the result of the
     method it decorates into the instance dict after the first call,
     effectively replacing the function it decorates with an instance
     variable. It is, in Python parlance, a data descriptor.
 
     """
 
     def __init__(self, wrapped: Callable[..., _T]) -> None:
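
The reworked quoting above prefers an RFC 5322 quoted-string and falls back
to the RFC 2231 charset''-encoded `key*=` form for non-ASCII parameters. A
quick sketch of both paths (not part of the diff):

    from aiohttp.helpers import content_disposition_header

    # ASCII filename: plain quoted-string.
    content_disposition_header("attachment", filename="report.csv")
    # -> 'attachment; filename="report.csv"'

    # Non-ASCII value in another parameter: quoted_string() raises
    # ValueError, so the helper emits the RFC 2231 fallback instead.
    content_disposition_header("form-data", name="café")
    # -> "form-data; name*=utf-8''caf%C3%A9"
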
@@ -413,7 +473,7 @@ class reify(Generic[_T]):
         self.__doc__ = wrapped.__doc__
         self.name = wrapped.__name__
 
-    def __get__(self, inst: _TSelf, owner: Optional[Type[Any]] = None) -> _T:
+    def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T:
         try:
             try:
                 return inst._cache[self.name]
@@ -426,7 +486,7 @@ class reify(Generic[_T]):
                 return self
             raise
 
-    def __set__(self, inst: _TSelf, value: _T) -> None:
+    def __set__(self, inst: _TSelf[_T], value: _T) -> None:
         raise AttributeError("reified property is read-only")
 
 
@@ -436,7 +496,7 @@ try:
     from ._helpers import reify as reify_c
 
     if not NO_EXTENSIONS:
-        reify = reify_c  # type: ignore
+        reify = reify_c  # type: ignore[misc,assignment]
 except ImportError:
     pass
 
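
reify caches the decorated method's result in the instance's _cache dict on
first access and then behaves like a read-only attribute. A small
illustration (not part of the diff):

    from aiohttp.helpers import reify

    class Config:
        def __init__(self) -> None:
            self._cache: dict = {}  # reify stores computed values here

        @reify
        def answer(self) -> int:
            print("computed once")
            return 42

    c = Config()
    c.answer  # prints "computed once", caches 42
    c.answer  # served from _cache; assigning to it raises AttributeError
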
@@ -470,7 +530,7 @@ def _is_ip_address(
     elif isinstance(host, (bytes, bytearray, memoryview)):
         return bool(regexb.match(host))
     else:
-        raise TypeError("{} [{}] is not a str or bytes".format(host, type(host)))
+        raise TypeError(f"{host} [{type(host)}] is not a str or bytes")
 
 
 is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
@@ -488,7 +548,7 @@ def next_whole_second() -> datetime.datetime:
     ) + datetime.timedelta(seconds=0)
 
 
-_cached_current_datetime = None  # type: Optional[int]
+_cached_current_datetime: Optional[int] = None
 _cached_formatted_datetime = ""
 
 
@@ -532,7 +592,7 @@ def rfc822_formatted_time() -> str:
     return _cached_formatted_datetime
 
 
-def _weakref_handle(info):  # type: ignore
+def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
     ref, name = info
     ob = ref()
     if ob is not None:
@@ -540,34 +600,40 @@ def _weakref_handle(info):  # type: ignore
         getattr(ob, name)()
 
 
-def weakref_handle(ob, name, timeout, loop):  # type: ignore
+def weakref_handle(
+    ob: object, name: str, timeout: float, loop: asyncio.AbstractEventLoop
+) -> Optional[asyncio.TimerHandle]:
     if timeout is not None and timeout > 0:
         when = loop.time() + timeout
         if timeout >= 5:
             when = ceil(when)
 
         return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
+    return None
 
 
-def call_later(cb, timeout, loop):  # type: ignore
+def call_later(
+    cb: Callable[[], Any], timeout: float, loop: asyncio.AbstractEventLoop
+) -> Optional[asyncio.TimerHandle]:
     if timeout is not None and timeout > 0:
         when = loop.time() + timeout
         if timeout > 5:
             when = ceil(when)
         return loop.call_at(when, cb)
+    return None
 
 
 class TimeoutHandle:
-    """ Timeout handle """
+    """Timeout handle"""
 
     def __init__(
         self, loop: asyncio.AbstractEventLoop, timeout: Optional[float]
     ) -> None:
         self._timeout = timeout
         self._loop = loop
-        self._callbacks = (
-            []
-        )  # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]]
+        self._callbacks: List[
+            Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
+        ] = []
 
     def register(
         self, callback: Callable[..., None], *args: Any, **kwargs: Any
@@ -621,11 +687,11 @@ class TimerNoop(BaseTimerContext):
 
 
 class TimerContext(BaseTimerContext):
-    """ Low resolution timeout context manager """
+    """Low resolution timeout context manager"""
 
     def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
         self._loop = loop
-        self._tasks = []  # type: List[asyncio.Task[Any]]
+        self._tasks: List[asyncio.Task[Any]] = []
         self._cancelled = False
 
     def __enter__(self) -> BaseTimerContext:
@@ -637,7 +703,6 @@ class TimerContext(BaseTimerContext):
         )
 
         if self._cancelled:
-            task.cancel()
             raise asyncio.TimeoutError from None
 
         self._tasks.append(task)
@@ -664,29 +729,24 @@ class TimerContext(BaseTimerContext):
         self._cancelled = True
 
 
-class CeilTimeout(async_timeout.timeout):
-    def __enter__(self) -> async_timeout.timeout:
-        if self._timeout is not None:
-            self._task = current_task(loop=self._loop)
-            if self._task is None:
-                raise RuntimeError(
-                    "Timeout context manager should be used inside a task"
-                )
-            now = self._loop.time()
-            delay = self._timeout
-            when = now + delay
-            if delay > 5:
-                when = ceil(when)
-            self._cancel_handler = self._loop.call_at(when, self._cancel_task)
-        return self
+def ceil_timeout(delay: Optional[float]) -> async_timeout.Timeout:
+    if delay is None or delay <= 0:
+        return async_timeout.timeout(None)
+
+    loop = get_running_loop()
+    now = loop.time()
+    when = now + delay
+    if delay > 5:
+        when = ceil(when)
+    return async_timeout.timeout_at(when)
 
 
 class HeadersMixin:
 
     ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])
 
-    _content_type = None  # type: Optional[str]
-    _content_dict = None  # type: Optional[Dict[str, str]]
+    _content_type: Optional[str] = None
+    _content_dict: Optional[Dict[str, str]] = None
     _stored_content_type = sentinel
 
     def _parse_content_type(self, raw: str) -> None:
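
CeilTimeout is replaced by ceil_timeout(), built on async_timeout's
absolute-deadline API: delays over 5 seconds are rounded up to a whole
second so loop timers can coalesce, and None or non-positive delays disable
the timeout. A minimal async sketch (not part of the diff):

    import asyncio
    from aiohttp.helpers import ceil_timeout

    async def fetch_with_deadline() -> None:
        async with ceil_timeout(10.2):  # deadline rounded up via loop.time()
            await asyncio.sleep(1)

    asyncio.run(fetch_with_deadline())
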
@@ -696,28 +756,33 @@ class HeadersMixin:
             self._content_type = "application/octet-stream"
             self._content_dict = {}
         else:
-            self._content_type, self._content_dict = cgi.parse_header(raw)
+            msg = HeaderParser().parsestr("Content-Type: " + raw)
+            self._content_type = msg.get_content_type()
+            params = msg.get_params()
+            self._content_dict = dict(params[1:])  # First element is content type again
 
     @property
     def content_type(self) -> str:
         """The value of content part for Content-Type HTTP header."""
-        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore
+        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore[attr-defined]
         if self._stored_content_type != raw:
             self._parse_content_type(raw)
-        return self._content_type  # type: ignore
+        return self._content_type  # type: ignore[return-value]
 
     @property
     def charset(self) -> Optional[str]:
         """The value of charset part for Content-Type HTTP header."""
-        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore
+        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore[attr-defined]
         if self._stored_content_type != raw:
             self._parse_content_type(raw)
-        return self._content_dict.get("charset")  # type: ignore
+        return self._content_dict.get("charset")  # type: ignore[union-attr]
 
     @property
     def content_length(self) -> Optional[int]:
         """The value of Content-Length HTTP header."""
-        content_length = self._headers.get(hdrs.CONTENT_LENGTH)  # type: ignore
+        content_length = self._headers.get(  # type: ignore[attr-defined]
+            hdrs.CONTENT_LENGTH
+        )
 
         if content_length is not None:
             return int(content_length)
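
The deprecated cgi.parse_header() (removed in Python 3.13) is replaced with
email.parser.HeaderParser. The same trick in isolation (not part of the
diff):

    from email.parser import HeaderParser

    raw = 'text/html; charset="UTF-8"'
    msg = HeaderParser().parsestr("Content-Type: " + raw)
    msg.get_content_type()       # 'text/html'
    # get_params()[0] repeats the content type, hence the [1:] slice above.
    dict(msg.get_params()[1:])   # {'charset': 'UTF-8'}
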
@@ -760,10 +825,10 @@ class ChainMapProxy(Mapping[str, Any]):
 
     def __len__(self) -> int:
         # reuses stored hash values if possible
-        return len(set().union(*self._maps))  # type: ignore
+        return len(set().union(*self._maps))  # type: ignore[arg-type]
 
     def __iter__(self) -> Iterator[str]:
-        d = {}  # type: Dict[str, Any]
+        d: Dict[str, Any] = {}
         for mapping in reversed(self._maps):
             # reuses stored hash values if possible
             d.update(mapping)
@@ -778,3 +843,36 @@ class ChainMapProxy(Mapping[str, Any]):
     def __repr__(self) -> str:
         content = ", ".join(map(repr, self._maps))
         return f"ChainMapProxy({content})"
+
+
+# https://tools.ietf.org/html/rfc7232#section-2.3
+_ETAGC = r"[!#-}\x80-\xff]+"
+_ETAGC_RE = re.compile(_ETAGC)
+_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
+QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
+LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")
+
+ETAG_ANY = "*"
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class ETag:
+    value: str
+    is_weak: bool = False
+
+
+def validate_etag_value(value: str) -> None:
+    if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value):
+        raise ValueError(
+            f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
+        )
+
+
+def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
+    """Process a date string, return a datetime object"""
+    if date_str is not None:
+        timetuple = parsedate(date_str)
+        if timetuple is not None:
+            with suppress(ValueError):
+                return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
+    return None
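
The new ETag helper models RFC 7232 validators (weak vs. strong), and
parse_http_date() converts RFC 822 date strings into aware datetimes. A
quick sketch (not part of the diff):

    from aiohttp.helpers import ETag, parse_http_date, validate_etag_value

    tag = ETag(value="33a64df5", is_weak=True)  # corresponds to W/"33a64df5"
    validate_etag_value(tag.value)              # ValueError if malformed

    parse_http_date("Wed, 21 Oct 2015 07:28:00 GMT")
    # -> datetime.datetime(2015, 10, 21, 7, 28, tzinfo=datetime.timezone.utc)
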
@@ -63,10 +63,8 @@ __all__ = (
 )
 
 
-SERVER_SOFTWARE = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
+SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
     sys.version_info, __version__
-)  # type: str
+)
 
-RESPONSES = (
-    http.server.BaseHTTPRequestHandler.responses
-)  # type: Mapping[int, Tuple[str, str]]
+RESPONSES: Mapping[int, Tuple[str, str]] = http.server.BaseHTTPRequestHandler.responses
@@ -1,6 +1,7 @@
 """Low-level http related exceptions."""
 
 
+from textwrap import indent
 from typing import Optional, Union
 
 from .typedefs import _CIMultiDict
@@ -35,10 +36,11 @@ class HttpProcessingError(Exception):
         self.message = message
 
     def __str__(self) -> str:
-        return f"{self.code}, message={self.message!r}"
+        msg = indent(self.message, "  ")
+        return f"{self.code}, message:\n{msg}"
 
     def __repr__(self) -> str:
-        return f"<{self.__class__.__name__}: {self}>"
+        return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>"
 
 
 class BadHttpMessage(HttpProcessingError):
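
__str__ now indents a possibly multi-line message under the status code,
while __repr__ keeps the single-line form. Illustration (not part of the
diff):

    from aiohttp.http_exceptions import HttpProcessingError

    err = HttpProcessingError(code=400, message="bad header\nname has NUL")
    str(err)
    # 400, message:
    #   bad header
    #   name has NUL
    repr(err)
    # <HttpProcessingError: 400, message='bad header\nname has NUL'>
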
@@ -4,8 +4,22 @@ import collections
 import re
 import string
 import zlib
+from contextlib import suppress
 from enum import IntEnum
-from typing import Any, List, Optional, Tuple, Type, Union
+from typing import (
+    Any,
+    Generic,
+    List,
+    NamedTuple,
+    Optional,
+    Pattern,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+)
 
 from multidict import CIMultiDict, CIMultiDictProxy, istr
 from yarl import URL
@@ -14,6 +28,7 @@ from . import hdrs
 from .base_protocol import BaseProtocol
 from .helpers import NO_EXTENSIONS, BaseTimerContext
 from .http_exceptions import (
+    BadHttpMessage,
     BadStatusLine,
     ContentEncodingError,
     ContentLengthError,
@@ -24,7 +39,7 @@ from .http_exceptions import (
 from .http_writer import HttpVersion, HttpVersion10
 from .log import internal_logger
 from .streams import EMPTY_PAYLOAD, StreamReader
-from .typedefs import RawHeaders
+from .typedefs import Final, RawHeaders
 
 try:
     import brotli
@@ -43,7 +58,7 @@ __all__ = (
     "RawResponseMessage",
 )
 
-ASCIISET = set(string.printable)
+ASCIISET: Final[Set[str]] = set(string.printable)
 
 # See https://tools.ietf.org/html/rfc7230#section-3.1.1
 # and https://tools.ietf.org/html/rfc7230#appendix-B
@@ -52,25 +67,23 @@ ASCIISET = set(string.printable)
 # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
 #     "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
 # token = 1*tchar
-METHRE = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
-VERSRE = re.compile(r"HTTP/(\d+).(\d+)")
-HDRRE = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]")
+METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
+VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d+).(\d+)")
+HDRRE: Final[Pattern[bytes]] = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]")
 
 
-RawRequestMessage = collections.namedtuple(
-    "RawRequestMessage",
-    [
-        "method",
-        "path",
-        "version",
-        "headers",
-        "raw_headers",
-        "should_close",
-        "compression",
-        "upgrade",
-        "chunked",
-        "url",
-    ],
-)
+class RawRequestMessage(NamedTuple):
+    method: str
+    path: str
+    version: HttpVersion
+    headers: "CIMultiDictProxy[str]"
+    raw_headers: RawHeaders
+    should_close: bool
+    compression: Optional[str]
+    upgrade: bool
+    chunked: bool
+    url: URL
 
 RawResponseMessage = collections.namedtuple(
     "RawResponseMessage",
@@ -88,6 +101,9 @@ RawResponseMessage = collections.namedtuple(
 )
 
 
+_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
+
+
 class ParseState(IntEnum):
 
     PARSE_NONE = 0
@@ -118,7 +134,7 @@ class HeadersParser:
     def parse_headers(
         self, lines: List[bytes]
     ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
-        headers = CIMultiDict()  # type: CIMultiDict[str]
+        headers: CIMultiDict[str] = CIMultiDict()
         raw_headers = []
 
         lines_idx = 1
@@ -198,12 +214,12 @@ class HeadersParser:
         return (CIMultiDictProxy(headers), tuple(raw_headers))
 
 
-class HttpParser(abc.ABC):
+class HttpParser(abc.ABC, Generic[_MsgT]):
     def __init__(
         self,
         protocol: Optional[BaseProtocol] = None,
         loop: Optional[asyncio.AbstractEventLoop] = None,
-        limit: int = 2 ** 16,
+        limit: int = 2**16,
         max_line_size: int = 8190,
         max_headers: int = 32768,
         max_field_size: int = 8190,
@@ -229,20 +245,20 @@ class HttpParser(abc.ABC):
         self.response_with_body = response_with_body
         self.read_until_eof = read_until_eof
 
-        self._lines = []  # type: List[bytes]
+        self._lines: List[bytes] = []
         self._tail = b""
         self._upgraded = False
         self._payload = None
-        self._payload_parser = None  # type: Optional[HttpPayloadParser]
+        self._payload_parser: Optional[HttpPayloadParser] = None
         self._auto_decompress = auto_decompress
         self._limit = limit
         self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size)
 
     @abc.abstractmethod
-    def parse_message(self, lines: List[bytes]) -> Any:
+    def parse_message(self, lines: List[bytes]) -> _MsgT:
         pass
 
-    def feed_eof(self) -> Any:
+    def feed_eof(self) -> Optional[_MsgT]:
         if self._payload_parser is not None:
             self._payload_parser.feed_eof()
             self._payload_parser = None
@@ -254,10 +270,9 @@ class HttpParser(abc.ABC):
         if self._lines:
             if self._lines[-1] != "\r\n":
                 self._lines.append(b"")
-            try:
+            with suppress(Exception):
                 return self.parse_message(self._lines)
-            except Exception:
-                return None
+        return None
 
     def feed_data(
         self,
@@ -267,7 +282,7 @@ class HttpParser(abc.ABC):
         CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
         METH_CONNECT: str = hdrs.METH_CONNECT,
         SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
-    ) -> Tuple[List[Any], bool, bytes]:
+    ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
 
         messages = []
 
@@ -297,20 +312,27 @@ class HttpParser(abc.ABC):
                 # \r\n\r\n found
                 if self._lines[-1] == EMPTY:
                     try:
-                        msg = self.parse_message(self._lines)
+                        msg: _MsgT = self.parse_message(self._lines)
                     finally:
                         self._lines.clear()
 
-                    # payload length
-                    length = msg.headers.get(CONTENT_LENGTH)
-                    if length is not None:
+                    def get_content_length() -> Optional[int]:
+                        # payload length
+                        length_hdr = msg.headers.get(CONTENT_LENGTH)
+                        if length_hdr is None:
+                            return None
+
                         try:
-                            length = int(length)
+                            length = int(length_hdr)
                         except ValueError:
                             raise InvalidHeader(CONTENT_LENGTH)
 
                         if length < 0:
                             raise InvalidHeader(CONTENT_LENGTH)
 
+                        return length
+
+                    length = get_content_length()
                     # do not support old websocket spec
                     if SEC_WEBSOCKET_KEY1 in msg.headers:
                         raise InvalidHeader(SEC_WEBSOCKET_KEY1)
@@ -346,6 +368,7 @@ class HttpParser(abc.ABC):
                     if not payload_parser.done:
                         self._payload_parser = payload_parser
                 elif method == METH_CONNECT:
+                    assert isinstance(msg, RawRequestMessage)
                     payload = StreamReader(
                         self.protocol,
                         timer=self.timer,
@@ -386,7 +409,7 @@ class HttpParser(abc.ABC):
                     if not payload_parser.done:
                         self._payload_parser = payload_parser
                 else:
-                    payload = EMPTY_PAYLOAD  # type: ignore
+                    payload = EMPTY_PAYLOAD
 
                 messages.append((msg, payload))
             else:
@@ -467,25 +490,36 @@ class HttpParser(abc.ABC):
 
         # chunking
         te = headers.get(hdrs.TRANSFER_ENCODING)
-        if te and "chunked" in te.lower():
-            chunked = True
+        if te is not None:
+            if "chunked" == te.lower():
+                chunked = True
+            else:
+                raise BadHttpMessage("Request has invalid `Transfer-Encoding`")
+
+            if hdrs.CONTENT_LENGTH in headers:
+                raise BadHttpMessage(
+                    "Content-Length can't be present with Transfer-Encoding",
+                )
 
         return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
 
     def set_upgraded(self, val: bool) -> None:
+        """Set connection upgraded (to websocket) mode.
+
+        :param bool val: new state.
+        """
         self._upgraded = val
 
 
-class HttpRequestParser(HttpParser):
-    """Read request status line. Exception .http_exceptions.BadStatusLine
+class HttpRequestParser(HttpParser[RawRequestMessage]):
+    """Read request status line.
+
+    Exception .http_exceptions.BadStatusLine
     could be raised in case of any errors in status line.
     Returns RawRequestMessage.
     """
 
-    def parse_message(self, lines: List[bytes]) -> Any:
+    def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
         # request line
         line = lines[0].decode("utf-8", "surrogateescape")
         try:
@@ -498,9 +532,6 @@ class HttpRequestParser(HttpParser):
                 "Status line is too long", str(self.max_line_size), str(len(path))
             )
 
-        path_part, _hash_separator, url_fragment = path.partition("#")
-        path_part, _question_mark_separator, qs_part = path_part.partition("?")
-
         # method
         if not METHRE.match(method):
             raise BadStatusLine(method)
@@ -515,6 +546,31 @@ class HttpRequestParser(HttpParser):
         except Exception:
             raise BadStatusLine(version)
 
+        if method == "CONNECT":
+            # authority-form,
+            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
+            url = URL.build(authority=path, encoded=True)
+        elif path.startswith("/"):
+            # origin-form,
+            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
+            path_part, _hash_separator, url_fragment = path.partition("#")
+            path_part, _question_mark_separator, qs_part = path_part.partition("?")
+
+            # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
+            # NOTE: parser does, otherwise it results into the same
+            # NOTE: HTTP Request-Line input producing different
+            # NOTE: `yarl.URL()` objects
+            url = URL.build(
+                path=path_part,
+                query_string=qs_part,
+                fragment=url_fragment,
+                encoded=True,
+            )
+        else:
+            # absolute-form for proxy maybe,
+            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
+            url = URL(path, encoded=True)
+
         # read headers
         (
             headers,
@@ -541,26 +597,18 @@ class HttpRequestParser(HttpParser):
             compression,
             upgrade,
             chunked,
-            # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
-            # NOTE: parser does, otherwise it results into the same
-            # NOTE: HTTP Request-Line input producing different
-            # NOTE: `yarl.URL()` objects
-            URL.build(
-                path=path_part,
-                query_string=qs_part,
-                fragment=url_fragment,
-                encoded=True,
-            ),
+            url,
         )
 
 
-class HttpResponseParser(HttpParser):
+class HttpResponseParser(HttpParser[RawResponseMessage]):
     """Read response status line and headers.
 
     BadStatusLine could be raised in case of any errors in status line.
-    Returns RawResponseMessage"""
+    Returns RawResponseMessage.
+    """
 
-    def parse_message(self, lines: List[bytes]) -> Any:
+    def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
         line = lines[0].decode("utf-8", "surrogateescape")
         try:
             version, status = line.split(None, 1)
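
parse_message() now dispatches on the three RFC 7230 request-target forms
before building the yarl URL. The mapping, sketched with plain yarl calls
(not part of the diff):

    from yarl import URL

    # authority-form (CONNECT): host:port only.
    URL.build(authority="example.com:443", encoded=True)

    # origin-form: fragment and query are split off the path first.
    URL.build(path="/index.html", query_string="q=1", encoded=True)

    # absolute-form (typically seen by proxies): parsed as a complete URL.
    URL("http://example.com/index.html?q=1", encoded=True)
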
@@ -641,9 +689,9 @@ class HttpPayloadParser:
 
         # payload decompression wrapper
         if response_with_body and compression and self._auto_decompress:
-            real_payload = DeflateBuffer(
+            real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer(
                 payload, compression
-            )  # type: Union[StreamReader, DeflateBuffer]
+            )
         else:
             real_payload = payload
 
@@ -812,6 +860,8 @@ class HttpPayloadParser:
 class DeflateBuffer:
     """DeflateStream decompress stream and feed data into specified stream."""
 
+    decompressor: Any
+
     def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
         self.out = out
         self.size = 0
@@ -822,9 +872,27 @@ class DeflateBuffer:
             if not HAS_BROTLI:  # pragma: no cover
                 raise ContentEncodingError(
                     "Can not decode content-encoding: brotli (br). "
-                    "Please install `brotlipy`"
+                    "Please install `Brotli`"
                 )
-            self.decompressor = brotli.Decompressor()
+
+            class BrotliDecoder:
+                # Supports both 'brotlipy' and 'Brotli' packages
+                # since they share an import name. The top branches
+                # are for 'brotlipy' and bottom branches for 'Brotli'
+                def __init__(self) -> None:
+                    self._obj = brotli.Decompressor()
+
+                def decompress(self, data: bytes) -> bytes:
+                    if hasattr(self._obj, "decompress"):
+                        return cast(bytes, self._obj.decompress(data))
+                    return cast(bytes, self._obj.process(data))
+
+                def flush(self) -> bytes:
+                    if hasattr(self._obj, "flush"):
+                        return cast(bytes, self._obj.flush())
+                    return b""
+
+            self.decompressor = BrotliDecoder()
         else:
             zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
             self.decompressor = zlib.decompressobj(wbits=zlib_mode)
@@ -886,7 +954,7 @@ RawResponseMessagePy = RawResponseMessage
 
 try:
     if not NO_EXTENSIONS:
-        from ._http_parser import (  # type: ignore
+        from ._http_parser import (  # type: ignore[import,no-redef]
             HttpRequestParser,
             HttpResponseParser,
             RawRequestMessage,
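
BrotliDecoder is an adapter: brotlipy exposes decompress()/flush() while the
Brotli package exposes process() and no flush(), and the hasattr probes
normalize both to one interface. DeflateBuffer then relies only on that
decompress()/flush() duck type, which zlib's decompressor already satisfies.
A standalone sketch of the same idea (not part of the diff):

    import zlib

    def decode_stream(decompressor, chunks):
        # Works with zlib.decompressobj() and with the BrotliDecoder shim,
        # because both honor the same decompress()/flush() protocol.
        out = b"".join(decompressor.decompress(c) for c in chunks)
        return out + decompressor.flush()

    data = zlib.compress(b"hello " * 100)
    decode_stream(zlib.decompressobj(), [data[:20], data[20:]])
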
@@ -9,11 +9,12 @@ import sys
 import zlib
 from enum import IntEnum
 from struct import Struct
-from typing import Any, Callable, List, Optional, Tuple, Union
+from typing import Any, Callable, List, Optional, Pattern, Set, Tuple, Union, cast
 
 from .base_protocol import BaseProtocol
 from .helpers import NO_EXTENSIONS
 from .streams import DataQueue
+from .typedefs import Final
 
 __all__ = (
     "WS_CLOSED_MESSAGE",
@@ -33,6 +34,7 @@ class WSCloseCode(IntEnum):
     GOING_AWAY = 1001
     PROTOCOL_ERROR = 1002
     UNSUPPORTED_DATA = 1003
+    ABNORMAL_CLOSURE = 1006
     INVALID_TEXT = 1007
     POLICY_VIOLATION = 1008
     MESSAGE_TOO_BIG = 1009
@@ -40,9 +42,10 @@ class WSCloseCode(IntEnum):
     INTERNAL_ERROR = 1011
     SERVICE_RESTART = 1012
     TRY_AGAIN_LATER = 1013
+    BAD_GATEWAY = 1014
 
 
-ALLOWED_CLOSE_CODES = {int(i) for i in WSCloseCode}
+ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
 
 
 class WSMsgType(IntEnum):
@@ -69,7 +72,7 @@ class WSMsgType(IntEnum):
     error = ERROR
 
 
-WS_KEY = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
+WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
 
 
 UNPACK_LEN2 = Struct("!H").unpack_from
@@ -79,8 +82,8 @@ PACK_LEN1 = Struct("!BB").pack
 PACK_LEN2 = Struct("!BBH").pack
 PACK_LEN3 = Struct("!BBQ").pack
 PACK_CLOSE_CODE = Struct("!H").pack
-MSG_SIZE = 2 ** 14
-DEFAULT_LIMIT = 2 ** 16
+MSG_SIZE: Final[int] = 2**14
+DEFAULT_LIMIT: Final[int] = 2**16
 
 
 _WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"])
@@ -107,18 +110,18 @@ class WebSocketError(Exception):
         super().__init__(code, message)
 
     def __str__(self) -> str:
-        return self.args[1]
+        return cast(str, self.args[1])
 
 
 class WSHandshakeError(Exception):
     """WebSocket protocol handshake error."""
 
 
-native_byteorder = sys.byteorder
+native_byteorder: Final[str] = sys.byteorder
 
 
 # Used by _websocket_mask_python
-_XOR_TABLE = [bytes(a ^ b for a in range(256)) for b in range(256)]
+_XOR_TABLE: Final[List[bytes]] = [bytes(a ^ b for a in range(256)) for b in range(256)]
 
 
 def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
@@ -149,16 +152,16 @@ if NO_EXTENSIONS:  # pragma: no cover
     _websocket_mask = _websocket_mask_python
 else:
     try:
-        from ._websocket import _websocket_mask_cython  # type: ignore
+        from ._websocket import _websocket_mask_cython  # type: ignore[import]
 
         _websocket_mask = _websocket_mask_cython
     except ImportError:  # pragma: no cover
         _websocket_mask = _websocket_mask_python
 
-_WS_DEFLATE_TRAILING = bytes([0x00, 0x00, 0xFF, 0xFF])
+_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])
 
 
-_WS_EXT_RE = re.compile(
+_WS_EXT_RE: Final[Pattern[str]] = re.compile(
     r"^(?:;\s*(?:"
     r"(server_no_context_takeover)|"
     r"(client_no_context_takeover)|"
@@ -166,7 +169,7 @@ _WS_EXT_RE = re.compile(
     r"(client_max_window_bits(?:=(\d+))?)))*$"
 )
 
-_WS_EXT_RE_SPLIT = re.compile(r"permessage-deflate([^,]+)?")
+_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
 
 
 def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
@@ -256,22 +259,22 @@ class WebSocketReader:
         self.queue = queue
         self._max_msg_size = max_msg_size
 
-        self._exc = None  # type: Optional[BaseException]
+        self._exc: Optional[BaseException] = None
         self._partial = bytearray()
         self._state = WSParserState.READ_HEADER
 
-        self._opcode = None  # type: Optional[int]
+        self._opcode: Optional[int] = None
         self._frame_fin = False
-        self._frame_opcode = None  # type: Optional[int]
+        self._frame_opcode: Optional[int] = None
         self._frame_payload = bytearray()
 
         self._tail = b""
         self._has_mask = False
-        self._frame_mask = None  # type: Optional[bytes]
+        self._frame_mask: Optional[bytes] = None
         self._payload_length = 0
         self._payload_length_flag = 0
-        self._compressed = None  # type: Optional[bool]
-        self._decompressobj = None  # type: Any  # zlib.decompressobj actually
+        self._compressed: Optional[bool] = None
+        self._decompressobj: Any = None  # zlib.decompressobj actually
        self._compress = compress
 
     def feed_eof(self) -> None:
@@ -588,7 +591,7 @@ class WebSocketWriter:
         self._closing = False
         self._limit = limit
         self._output_size = 0
-        self._compressobj = None  # type: Any  # actually compressobj
+        self._compressobj: Any = None  # actually compressobj
 
     async def _send_frame(
         self, message: bytes, opcode: int, compress: Optional[int] = None
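
The pure-Python frame masker XORs the payload in place using the
precomputed 256x256 _XOR_TABLE and bytes.translate(), handling each of the
four mask bytes as a stride. The same trick in isolation (not part of the
diff):

    _XOR_TABLE = [bytes(a ^ b for a in range(256)) for b in range(256)]

    def websocket_mask(mask: bytes, data: bytearray) -> None:
        assert len(mask) == 4
        for i in range(4):
            # translate() performs the XOR as a C-speed table lookup.
            data[i::4] = data[i::4].translate(_XOR_TABLE[mask[i]])

    payload = bytearray(b"hello websocket")
    websocket_mask(b"\x01\x02\x03\x04", payload)  # self-inverse: mask twice to undo
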
@@ -1,9 +1,8 @@
 """Http related parsers and protocol."""
 
 import asyncio
-import collections
 import zlib
-from typing import Any, Awaitable, Callable, Optional, Union  # noqa
+from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union  # noqa
 
 from multidict import CIMultiDict
 
@@ -13,12 +12,18 @@ from .helpers import NO_EXTENSIONS
 
 __all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
 
-HttpVersion = collections.namedtuple("HttpVersion", ["major", "minor"])
+
+class HttpVersion(NamedTuple):
+    major: int
+    minor: int
+
+
 HttpVersion10 = HttpVersion(1, 0)
 HttpVersion11 = HttpVersion(1, 1)
 
 
 _T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
+_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]
 
 
 class StreamWriter(AbstractStreamWriter):
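
HttpVersion moves from collections.namedtuple to typing.NamedTuple, which
keeps tuple semantics (ordering, unpacking) while adding int annotations for
type checkers. Illustration (not part of the diff):

    from aiohttp import HttpVersion

    v = HttpVersion(1, 1)
    v.major, v.minor          # 1, 1
    v >= HttpVersion(1, 0)    # True: plain tuple comparison still works
    major, minor = v          # unpacking is unchanged
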
@@ -27,9 +32,9 @@ class StreamWriter(AbstractStreamWriter):
         protocol: BaseProtocol,
         loop: asyncio.AbstractEventLoop,
         on_chunk_sent: _T_OnChunkSent = None,
+        on_headers_sent: _T_OnHeadersSent = None,
     ) -> None:
         self._protocol = protocol
-        self._transport = protocol.transport
 
         self.loop = loop
         self.length = None
@@ -38,14 +43,15 @@ class StreamWriter(AbstractStreamWriter):
         self.output_size = 0
 
         self._eof = False
-        self._compress = None  # type: Any
+        self._compress: Any = None
         self._drain_waiter = None
 
-        self._on_chunk_sent = on_chunk_sent  # type: _T_OnChunkSent
+        self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent
+        self._on_headers_sent: _T_OnHeadersSent = on_headers_sent
 
     @property
     def transport(self) -> Optional[asyncio.Transport]:
-        return self._transport
+        return self._protocol.transport
 
     @property
     def protocol(self) -> BaseProtocol:
@@ -54,18 +60,20 @@ class StreamWriter(AbstractStreamWriter):
     def enable_chunking(self) -> None:
         self.chunked = True
 
-    def enable_compression(self, encoding: str = "deflate") -> None:
+    def enable_compression(
+        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
+    ) -> None:
         zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
-        self._compress = zlib.compressobj(wbits=zlib_mode)
+        self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)
 
     def _write(self, chunk: bytes) -> None:
         size = len(chunk)
         self.buffer_size += size
         self.output_size += size
 
-        if self._transport is None or self._transport.is_closing():
+        transport = self.transport
+        if not self._protocol.connected or transport is None or transport.is_closing():
             raise ConnectionResetError("Cannot write to closing transport")
-        self._transport.write(chunk)
+        transport.write(chunk)
 
     async def write(
         self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
@@ -114,6 +122,9 @@ class StreamWriter(AbstractStreamWriter):
         self, status_line: str, headers: "CIMultiDict[str]"
     ) -> None:
         """Write request/response status and headers."""
+        if self._on_headers_sent is not None:
+            await self._on_headers_sent(headers)
+
         # status + headers
         buf = _serialize_headers(status_line, headers)
         self._write(buf)
@@ -147,7 +158,6 @@ class StreamWriter(AbstractStreamWriter):
             await self.drain()
 
         self._eof = True
-        self._transport = None
 
     async def drain(self) -> None:
         """Flush the write buffer.
@@ -161,19 +171,25 @@ class StreamWriter(AbstractStreamWriter):
         await self._protocol._drain_helper()
 
 
+def _safe_header(string: str) -> str:
+    if "\r" in string or "\n" in string:
+        raise ValueError(
+            "Newline or carriage return detected in headers. "
+            "Potential header injection attack."
+        )
+    return string
+
+
 def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
-    line = (
-        status_line
-        + "\r\n"
-        + "".join([k + ": " + v + "\r\n" for k, v in headers.items()])
-    )
-    return line.encode("utf-8") + b"\r\n"
+    headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items())
+    line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n"
+    return line.encode("utf-8")
 
 
 _serialize_headers = _py_serialize_headers
 
 try:
     import aiohttp._http_writer as _http_writer  # type: ignore
|
||||
import aiohttp._http_writer as _http_writer # type: ignore[import]
|
||||
|
||||
_c_serialize_headers = _http_writer._serialize_headers
|
||||
if not NO_EXTENSIONS:
|
||||
|
|
|
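The `_safe_header` guard added above turns header injection into a hard failure at serialization time. A hedged sketch of the observable behavior, using only names defined in this hunk (and note that `HttpVersion` stays tuple-comparable after the NamedTuple rewrite, e.g. `HttpVersion(1, 1) >= HttpVersion10`):

from multidict import CIMultiDict

headers = CIMultiDict({"X-Test": "ok\r\nEvil: injected"})
try:
    _py_serialize_headers("HTTP/1.1 200 OK", headers)
except ValueError as exc:
    print(exc)  # "Newline or carriage return detected in headers. ..."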
@@ -1,16 +1,12 @@
 import asyncio
 import collections
-from typing import Any, Optional
-
-try:
-    from typing import Deque
-except ImportError:
-    from typing_extensions import Deque
+from typing import Any, Deque, Optional


 class EventResultOrError:
-    """
-    This class wrappers the Event asyncio lock allowing either awake the
+    """Event asyncio lock helper class.
+
+    Wraps the Event asyncio lock allowing either to awake the
     locked Tasks without any error or raising an exception.

     thanks to @vorpalsmith for the simple design.
@@ -18,9 +14,9 @@ class EventResultOrError:

     def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
         self._loop = loop
-        self._exc = None  # type: Optional[BaseException]
+        self._exc: Optional[BaseException] = None
         self._event = asyncio.Event()
-        self._waiters = collections.deque()  # type: Deque[asyncio.Future[Any]]
+        self._waiters: Deque[asyncio.Future[Any]] = collections.deque()

     def set(self, exc: Optional[BaseException] = None) -> None:
         self._exc = exc
@@ -40,6 +36,6 @@ class EventResultOrError:
         return val

     def cancel(self) -> None:
-        """ Cancel all waiters """
+        """Cancel all waiters"""
         for waiter in self._waiters:
             waiter.cancel()
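An illustrative sketch of the two wake-up modes of the class above (internal API, so treat the exact semantics as an assumption): `set()` releases waiters normally, while `set(exc)` makes every pending `wait()` raise.

import asyncio

async def main() -> None:
    ev = EventResultOrError(loop=asyncio.get_running_loop())

    async def waiter() -> None:
        try:
            await ev.wait()
            print("woken cleanly")
        except RuntimeError as exc:
            print("woken with error:", exc)

    task = asyncio.ensure_future(waiter())
    await asyncio.sleep(0)  # let the waiter block on wait()
    ev.set(exc=RuntimeError("connection lost"))  # ev.set() would wake it cleanly
    await task

asyncio.run(main())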
@@ -11,6 +11,7 @@ from typing import (
     TYPE_CHECKING,
     Any,
     AsyncIterator,
+    Deque,
     Dict,
     Iterator,
     List,
@@ -20,6 +21,7 @@ from typing import (
     Tuple,
     Type,
     Union,
+    cast,
 )
 from urllib.parse import parse_qsl, unquote, urlencode

@@ -101,7 +103,7 @@ def parse_content_disposition(
             warnings.warn(BadContentDispositionHeader(header))
             return None, {}

-        params = {}  # type: Dict[str, str]
+        params: Dict[str, str] = {}
         while parts:
             item = parts.pop(0)

@@ -152,7 +154,7 @@ def parse_content_disposition(
             elif parts:
                 # maybe just ; in filename, in any case this is just
                 # one case fix, for proper fix we need to redesign parser
-                _value = "{};{}".format(value, parts[0])
+                _value = f"{value};{parts[0]}"
                 if is_quoted(_value):
                     parts.pop(0)
                     value = unescape(_value[1:-1].lstrip("\\/"))
@@ -240,8 +242,10 @@ class MultipartResponseWrapper:
         return item

     async def release(self) -> None:
-        """Releases the connection gracefully, reading all the content
-        to the void."""
+        """Release the connection gracefully.
+
+        All remaining content is read to the void.
+        """
         await self.resp.release()


@@ -261,13 +265,13 @@ class BodyPartReader:
         self._length = int(length) if length is not None else None
         self._read_bytes = 0
-        # TODO: typeing.Deque is not supported by Python 3.5
-        self._unread = deque()  # type: Any
-        self._prev_chunk = None  # type: Optional[bytes]
+        self._unread: Deque[bytes] = deque()
+        self._prev_chunk: Optional[bytes] = None
         self._content_eof = 0
-        self._cache = {}  # type: Dict[str, Any]
+        self._cache: Dict[str, Any] = {}

     def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
-        return self  # type: ignore
+        return self  # type: ignore[return-value]

     async def __anext__(self) -> bytes:
         part = await self.next()
@@ -411,12 +415,10 @@ class BodyPartReader:
         if not data:
             return None
         encoding = encoding or self.get_charset(default="utf-8")
-        return json.loads(data.decode(encoding))
+        return cast(Dict[str, Any], json.loads(data.decode(encoding)))

     async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
-        """Like read(), but assumes that body parts contains form
-        urlencoded data.
-        """
+        """Like read(), but assumes that body parts contain form urlencoded data."""
         data = await self.read(decode=True)
         if not data:
             return []
@@ -435,7 +437,9 @@ class BodyPartReader:
         return self._at_eof

     def decode(self, data: bytes) -> bytes:
-        """Decodes data according the specified Content-Encoding
+        """Decodes data.
+
+        Decoding is done according the specified Content-Encoding
         or Content-Transfer-Encoding headers value.
         """
         if CONTENT_TRANSFER_ENCODING in self.headers:
@@ -478,17 +482,18 @@ class BodyPartReader:

     @reify
     def name(self) -> Optional[str]:
-        """Returns name specified in Content-Disposition header or None
-        if missed or header is malformed.
-        """
+        """Returns name specified in Content-Disposition header.
+
+        If the header is missing or malformed, returns None.
+        """
         _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
         return content_disposition_filename(params, "name")

     @reify
     def filename(self) -> Optional[str]:
-        """Returns filename specified in Content-Disposition header or None
-        if missed or header is malformed.
+        """Returns filename specified in Content-Disposition header.
+
+        Returns None if the header is missing or malformed.
         """
         _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
         return content_disposition_filename(params, "filename")
@@ -499,7 +504,7 @@ class BodyPartReaderPayload(Payload):
     def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
         super().__init__(value, *args, **kwargs)

-        params = {}  # type: Dict[str, str]
+        params: Dict[str, str] = {}
         if value.name is not None:
             params["name"] = value.name
         if value.filename is not None:
@@ -510,10 +515,10 @@ class BodyPartReaderPayload(Payload):

     async def write(self, writer: Any) -> None:
         field = self._value
-        chunk = await field.read_chunk(size=2 ** 16)
+        chunk = await field.read_chunk(size=2**16)
         while chunk:
             await writer.write(field.decode(chunk))
-            chunk = await field.read_chunk(size=2 ** 16)
+            chunk = await field.read_chunk(size=2**16)


 class MultipartReader:
@@ -531,17 +536,15 @@ class MultipartReader:
         self.headers = headers
         self._boundary = ("--" + self._get_boundary()).encode()
         self._content = content
-        self._last_part = (
-            None
-        )  # type: Optional[Union['MultipartReader', BodyPartReader]]
+        self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None
         self._at_eof = False
         self._at_bof = True
-        self._unread = []  # type: List[bytes]
+        self._unread: List[bytes] = []

     def __aiter__(
         self,
     ) -> AsyncIterator["BodyPartReader"]:
-        return self  # type: ignore
+        return self  # type: ignore[return-value]

     async def __anext__(
         self,
@@ -566,9 +569,7 @@ class MultipartReader:
         return obj

     def at_eof(self) -> bool:
-        """Returns True if the final boundary was reached or
-        False otherwise.
-        """
+        """Returns True if the final boundary was reached, false otherwise."""
         return self._at_eof

     async def next(
@@ -608,8 +609,9 @@ class MultipartReader:
         self,
         headers: "CIMultiDictProxy[str]",
     ) -> Union["MultipartReader", BodyPartReader]:
-        """Dispatches the response by the `Content-Type` header, returning
-        suitable reader instance.
+        """Dispatches the response by the `Content-Type` header.
+
+        Returns a suitable reader instance.

         :param dict headers: Response headers
         """
@@ -723,7 +725,7 @@ class MultipartWriter(Payload):

         super().__init__(None, content_type=ctype)

-        self._parts = []  # type: List[_Part]
+        self._parts: List[_Part] = []

     def __enter__(self) -> "MultipartWriter":
         return self
@@ -745,8 +747,8 @@ class MultipartWriter(Payload):
     def __bool__(self) -> bool:
         return True

-    _valid_tchar_regex = re.compile(br"\A[!#$%&'*+\-.^_`|~\w]+\Z")
-    _invalid_qdtext_char_regex = re.compile(br"[\x00-\x08\x0A-\x1F\x7F]")
+    _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z")
+    _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]")

     @property
     def _boundary_value(self) -> str:
@@ -802,20 +804,20 @@ class MultipartWriter(Payload):
     def append_payload(self, payload: Payload) -> Payload:
         """Adds a new body part to multipart writer."""
         # compression
-        encoding = payload.headers.get(
+        encoding: Optional[str] = payload.headers.get(
             CONTENT_ENCODING,
             "",
-        ).lower()  # type: Optional[str]
+        ).lower()
         if encoding and encoding not in ("deflate", "gzip", "identity"):
             raise RuntimeError(f"unknown content encoding: {encoding}")
         if encoding == "identity":
             encoding = None

         # te encoding
-        te_encoding = payload.headers.get(
+        te_encoding: Optional[str] = payload.headers.get(
             CONTENT_TRANSFER_ENCODING,
             "",
-        ).lower()  # type: Optional[str]
+        ).lower()
         if te_encoding not in ("", "base64", "quoted-printable", "binary"):
             raise RuntimeError(
                 "unknown content transfer encoding: {}" "".format(te_encoding)
@@ -828,7 +830,7 @@ class MultipartWriter(Payload):
         if size is not None and not (encoding or te_encoding):
             payload.headers[CONTENT_LENGTH] = str(size)

-        self._parts.append((payload, encoding, te_encoding))  # type: ignore
+        self._parts.append((payload, encoding, te_encoding))  # type: ignore[arg-type]
         return payload

     def append_json(
@@ -893,7 +895,7 @@ class MultipartWriter(Payload):
                     w.enable_compression(encoding)
                 if te_encoding:
                     w.enable_encoding(te_encoding)
-                await part.write(w)  # type: ignore
+                await part.write(w)  # type: ignore[arg-type]
                 await w.write_eof()
             else:
                 await part.write(writer)
@@ -907,9 +909,9 @@ class MultipartWriter(Payload):
 class MultipartPayloadWriter:
     def __init__(self, writer: Any) -> None:
         self._writer = writer
-        self._encoding = None  # type: Optional[str]
-        self._compress = None  # type: Any
-        self._encoding_buffer = None  # type: Optional[bytearray]
+        self._encoding: Optional[str] = None
+        self._compress: Any = None
+        self._encoding_buffer: Optional[bytearray] = None

     def enable_encoding(self, encoding: str) -> None:
         if encoding == "base64":
@@ -918,9 +920,11 @@ class MultipartPayloadWriter:
         elif encoding == "quoted-printable":
             self._encoding = "quoted-printable"

-    def enable_compression(self, encoding: str = "deflate") -> None:
+    def enable_compression(
+        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
+    ) -> None:
         zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS
-        self._compress = zlib.compressobj(wbits=zlib_mode)
+        self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)

     async def write_eof(self) -> None:
         if self._compress is not None:
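A hedged usage sketch of the writer-side validation touched above: unknown transfer encodings are rejected when a part is appended, not when the body is written. Assumes the public `aiohttp.MultipartWriter` API of the vendored 3.8 release.

from aiohttp import MultipartWriter

with MultipartWriter("mixed") as mpwriter:
    mpwriter.append("plain text part")
    mpwriter.append_json({"key": "value"})
    try:
        mpwriter.append(b"blob", {"Content-Transfer-Encoding": "uuencode"})
    except RuntimeError as exc:
        print(exc)  # unknown content transfer encoding: uuencode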
@@ -15,7 +15,6 @@ from typing import (
     Dict,
     Iterable,
     Optional,
-    Text,
     TextIO,
     Tuple,
     Type,
@@ -34,7 +33,7 @@ from .helpers import (
     sentinel,
 )
 from .streams import StreamReader
-from .typedefs import JSONEncoder, _CIMultiDict
+from .typedefs import Final, JSONEncoder, _CIMultiDict

 __all__ = (
     "PAYLOAD_REGISTRY",
@@ -52,8 +51,7 @@ __all__ = (
     "AsyncIterablePayload",
 )

-TOO_LARGE_BYTES_BODY = 2 ** 20  # 1 MB
-
+TOO_LARGE_BYTES_BODY: Final[int] = 2**20  # 1 MB

 if TYPE_CHECKING:  # pragma: no cover
     from typing import List
@@ -89,6 +87,10 @@ class payload_type:
         return factory


+PayloadType = Type["Payload"]
+_PayloadRegistryItem = Tuple[PayloadType, Any]
+
+
 class PayloadRegistry:
     """Payload registry.

@@ -96,12 +98,16 @@ class PayloadRegistry:
     """

     def __init__(self) -> None:
-        self._first = []  # type: List[Tuple[Type[Payload], Any]]
-        self._normal = []  # type: List[Tuple[Type[Payload], Any]]
-        self._last = []  # type: List[Tuple[Type[Payload], Any]]
+        self._first: List[_PayloadRegistryItem] = []
+        self._normal: List[_PayloadRegistryItem] = []
+        self._last: List[_PayloadRegistryItem] = []

     def get(
-        self, data: Any, *args: Any, _CHAIN: Any = chain, **kwargs: Any
+        self,
+        data: Any,
+        *args: Any,
+        _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
+        **kwargs: Any,
     ) -> "Payload":
         if isinstance(data, Payload):
             return data
@@ -112,7 +118,7 @@ class PayloadRegistry:
         raise LookupError()

     def register(
-        self, factory: Type["Payload"], type: Any, *, order: Order = Order.normal
+        self, factory: PayloadType, type: Any, *, order: Order = Order.normal
     ) -> None:
         if order is Order.try_first:
             self._first.append((factory, type))
@@ -126,8 +132,8 @@ class PayloadRegistry:

 class Payload(ABC):

-    _default_content_type = "application/octet-stream"  # type: str
-    _size = None  # type: Optional[int]
+    _default_content_type: str = "application/octet-stream"
+    _size: Optional[int] = None

     def __init__(
         self,
@@ -142,7 +148,7 @@ class Payload(ABC):
     ) -> None:
         self._encoding = encoding
         self._filename = filename
-        self._headers = CIMultiDict()  # type: _CIMultiDict
+        self._headers: _CIMultiDict = CIMultiDict()
         self._value = value
         if content_type is not sentinel and content_type is not None:
             self._headers[hdrs.CONTENT_TYPE] = content_type
@@ -190,11 +196,15 @@ class Payload(ABC):
         return self._headers[hdrs.CONTENT_TYPE]

     def set_content_disposition(
-        self, disptype: str, quote_fields: bool = True, **params: Any
+        self,
+        disptype: str,
+        quote_fields: bool = True,
+        _charset: str = "utf-8",
+        **params: Any,
     ) -> None:
         """Sets ``Content-Disposition`` header."""
         self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
-            disptype, quote_fields=quote_fields, **params
+            disptype, quote_fields=quote_fields, _charset=_charset, **params
         )

     @abstractmethod
@@ -208,9 +218,7 @@ class Payload(ABC):
 class BytesPayload(Payload):
     def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None:
         if not isinstance(value, (bytes, bytearray, memoryview)):
-            raise TypeError(
-                "value argument must be byte-ish, not {!r}".format(type(value))
-            )
+            raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")

         if "content_type" not in kwargs:
             kwargs["content_type"] = "application/octet-stream"
@@ -242,7 +250,7 @@ class BytesPayload(Payload):
 class StringPayload(BytesPayload):
     def __init__(
         self,
-        value: Text,
+        value: str,
         *args: Any,
         encoding: Optional[str] = None,
         content_type: Optional[str] = None,
@@ -276,6 +284,8 @@ class StringIOPayload(StringPayload):


 class IOBasePayload(Payload):
+    _value: IO[Any]
+
     def __init__(
         self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
     ) -> None:
@@ -291,15 +301,17 @@ class IOBasePayload(Payload):
     async def write(self, writer: AbstractStreamWriter) -> None:
         loop = asyncio.get_event_loop()
         try:
-            chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
+            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
             while chunk:
                 await writer.write(chunk)
-                chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
+                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
         finally:
             await loop.run_in_executor(None, self._value.close)


 class TextIOPayload(IOBasePayload):
+    _value: TextIO
+
     def __init__(
         self,
         value: TextIO,
@@ -338,10 +350,15 @@ class TextIOPayload(IOBasePayload):
     async def write(self, writer: AbstractStreamWriter) -> None:
         loop = asyncio.get_event_loop()
         try:
-            chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
+            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
             while chunk:
-                await writer.write(chunk.encode(self._encoding))
-                chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
+                data = (
+                    chunk.encode(encoding=self._encoding)
+                    if self._encoding
+                    else chunk.encode()
+                )
+                await writer.write(data)
+                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
         finally:
             await loop.run_in_executor(None, self._value.close)

@@ -400,13 +417,13 @@ else:

 class AsyncIterablePayload(Payload):

-    _iter = None  # type: Optional[_AsyncIterator]
+    _iter: Optional[_AsyncIterator] = None

     def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
         if not isinstance(value, AsyncIterable):
             raise TypeError(
                 "value argument must support "
-                "collections.abc.AsyncIterablebe interface, "
+                "collections.abc.AsyncIterable interface, "
                 "got {!r}".format(type(value))
             )

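An illustrative sketch of the registry dispatch that the new `_PayloadRegistryItem` alias types: factories are tried in `try_first`, `normal`, `try_last` order, and `get()` returns the first matching payload. All names are from this module (`Order` is its registration-order enum); the snippet itself is a sketch, not vendored code.

registry = PayloadRegistry()
registry.register(BytesPayload, (bytes, bytearray, memoryview))
registry.register(StringPayload, str, order=Order.try_last)

payload = registry.get(b"raw body")
assert isinstance(payload, BytesPayload)
assert payload.size == len(b"raw body")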
@@ -1,4 +1,5 @@
-""" Payload implemenation for coroutines as data provider.
+"""
+Payload implemenation for coroutines as data provider.

 As a simple case, you can upload data from file::

@@ -43,7 +44,7 @@ class _stream_wrapper:
         self.kwargs = kwargs

     async def __call__(self, writer: AbstractStreamWriter) -> None:
-        await self.coro(writer, *self.args, **self.kwargs)  # type: ignore
+        await self.coro(writer, *self.args, **self.kwargs)  # type: ignore[operator]


 class streamer:
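The module docstring's own file-upload case, spelled out as a sketch (the decorator is the `streamer` class above; the file name is illustrative):

import aiohttp

@aiohttp.streamer
async def file_sender(writer, file_name=None):
    with open(file_name, "rb") as f:
        chunk = f.read(2**16)
        while chunk:
            await writer.write(chunk)
            chunk = f.read(2**16)

# e.g. session.post(url, data=file_sender(file_name="huge.dat"))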
@@ -2,6 +2,7 @@ import asyncio
 import contextlib
 import warnings
 from collections.abc import Callable
+from typing import Any, Awaitable, Callable, Dict, Generator, Optional, Union

 import pytest

@@ -29,8 +30,10 @@ try:
 except ImportError:  # pragma: no cover
     tokio = None

+AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]]

-def pytest_addoption(parser):  # type: ignore
+
+def pytest_addoption(parser):  # type: ignore[no-untyped-def]
     parser.addoption(
         "--aiohttp-fast",
         action="store_true",
@@ -51,8 +54,9 @@ def pytest_addoption(parser):  # type: ignore
     )


-def pytest_fixture_setup(fixturedef):  # type: ignore
-    """
+def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
+    """Set up pytest fixture.
+
     Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
     """
     func = fixturedef.func
@@ -72,7 +76,7 @@ def pytest_fixture_setup(fixturedef):  # type: ignore
         fixturedef.argnames += ("request",)
         strip_request = True

-    def wrapper(*args, **kwargs):  # type: ignore
+    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
         request = kwargs["request"]
         if strip_request:
             del kwargs["request"]
@@ -93,7 +97,7 @@ def pytest_fixture_setup(fixturedef):  # type: ignore
             # then advance it again in a finalizer
             gen = func(*args, **kwargs)

-            def finalizer():  # type: ignore
+            def finalizer():  # type: ignore[no-untyped-def]
                 try:
                     return _loop.run_until_complete(gen.__anext__())
                 except StopAsyncIteration:
@@ -108,21 +112,22 @@ def pytest_fixture_setup(fixturedef):  # type: ignore


 @pytest.fixture
-def fast(request):  # type: ignore
+def fast(request):  # type: ignore[no-untyped-def]
     """--fast config option"""
     return request.config.getoption("--aiohttp-fast")


 @pytest.fixture
-def loop_debug(request):  # type: ignore
+def loop_debug(request):  # type: ignore[no-untyped-def]
     """--enable-loop-debug config option"""
     return request.config.getoption("--aiohttp-enable-loop-debug")


 @contextlib.contextmanager
-def _runtime_warning_context():  # type: ignore
-    """
-    Context manager which checks for RuntimeWarnings, specifically to
+def _runtime_warning_context():  # type: ignore[no-untyped-def]
+    """Context manager which checks for RuntimeWarnings.
+
+    This exists specifically to
     avoid "coroutine 'X' was never awaited" warnings being missed.

     If RuntimeWarnings occur in the context a RuntimeError is raised.
@@ -143,9 +148,10 @@ def _runtime_warning_context():  # type: ignore


 @contextlib.contextmanager
-def _passthrough_loop_context(loop, fast=False):  # type: ignore
-    """
-    setups and tears down a loop unless one is passed in via the loop
+def _passthrough_loop_context(loop, fast=False):  # type: ignore[no-untyped-def]
+    """Passthrough loop context.
+
+    Sets up and tears down a loop unless one is passed in via the loop
     argument when it's passed straight through.
     """
     if loop:
@@ -158,18 +164,14 @@ def _passthrough_loop_context(loop, fast=False):  # type: ignore
         teardown_test_loop(loop, fast=fast)


-def pytest_pycollect_makeitem(collector, name, obj):  # type: ignore
-    """
-    Fix pytest collecting for coroutines.
-    """
+def pytest_pycollect_makeitem(collector, name, obj):  # type: ignore[no-untyped-def]
+    """Fix pytest collecting for coroutines."""
     if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
         return list(collector._genfunctions(name, obj))


-def pytest_pyfunc_call(pyfuncitem):  # type: ignore
-    """
-    Run coroutines in an event loop instead of a normal function call.
-    """
+def pytest_pyfunc_call(pyfuncitem):  # type: ignore[no-untyped-def]
+    """Run coroutines in an event loop instead of a normal function call."""
     fast = pyfuncitem.config.getoption("--aiohttp-fast")
     if asyncio.iscoroutinefunction(pyfuncitem.function):
         existing_loop = pyfuncitem.funcargs.get(
@@ -186,7 +188,7 @@ def pytest_pyfunc_call(pyfuncitem):  # type: ignore
         return True


-def pytest_generate_tests(metafunc):  # type: ignore
+def pytest_generate_tests(metafunc):  # type: ignore[no-untyped-def]
     if "loop_factory" not in metafunc.fixturenames:
         return

@@ -202,7 +204,7 @@ def pytest_generate_tests(metafunc):  # type: ignore
     if loops == "all":
         loops = "pyloop,uvloop?,tokio?"

-    factories = {}  # type: ignore
+    factories = {}  # type: ignore[var-annotated]
     for name in loops.split(","):
         required = not name.endswith("?")
         name = name.strip(" ?")
@@ -221,7 +223,7 @@ def pytest_generate_tests(metafunc):  # type: ignore


 @pytest.fixture
-def loop(loop_factory, fast, loop_debug):  # type: ignore
+def loop(loop_factory, fast, loop_debug):  # type: ignore[no-untyped-def]
     """Return an instance of the event loop."""
     policy = loop_factory()
     asyncio.set_event_loop_policy(policy)
@@ -233,12 +235,12 @@ def loop(loop_factory, fast, loop_debug):  # type: ignore


 @pytest.fixture
-def proactor_loop():  # type: ignore
+def proactor_loop():  # type: ignore[no-untyped-def]
     if not PY_37:
         policy = asyncio.get_event_loop_policy()
-        policy._loop_factory = asyncio.ProactorEventLoop  # type: ignore
+        policy._loop_factory = asyncio.ProactorEventLoop  # type: ignore[attr-defined]
     else:
-        policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore
+        policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore[attr-defined]
     asyncio.set_event_loop_policy(policy)

     with loop_context(policy.new_event_loop) as _loop:
@@ -247,7 +249,7 @@ def proactor_loop():  # type: ignore


 @pytest.fixture
-def unused_port(aiohttp_unused_port):  # type: ignore # pragma: no cover
+def unused_port(aiohttp_unused_port):  # type: ignore[no-untyped-def] # pragma: no cover
     warnings.warn(
         "Deprecated, use aiohttp_unused_port fixture instead",
         DeprecationWarning,
@@ -257,20 +259,20 @@ def unused_port(aiohttp_unused_port):  # type: ignore # pragma: no cover


 @pytest.fixture
-def aiohttp_unused_port():  # type: ignore
+def aiohttp_unused_port():  # type: ignore[no-untyped-def]
     """Return a port that is unused on the current host."""
     return _unused_port


 @pytest.fixture
-def aiohttp_server(loop):  # type: ignore
+def aiohttp_server(loop):  # type: ignore[no-untyped-def]
     """Factory to create a TestServer instance, given an app.

     aiohttp_server(app, **kwargs)
     """
     servers = []

-    async def go(app, *, port=None, **kwargs):  # type: ignore
+    async def go(app, *, port=None, **kwargs):  # type: ignore[no-untyped-def]
         server = TestServer(app, port=port)
         await server.start_server(loop=loop, **kwargs)
         servers.append(server)
@@ -278,7 +280,7 @@ def aiohttp_server(loop):  # type: ignore

     yield go

-    async def finalize():  # type: ignore
+    async def finalize() -> None:
         while servers:
             await servers.pop().close()

@@ -286,7 +288,7 @@ def aiohttp_server(loop):  # type: ignore


 @pytest.fixture
-def test_server(aiohttp_server):  # type: ignore # pragma: no cover
+def test_server(aiohttp_server):  # type: ignore[no-untyped-def] # pragma: no cover
     warnings.warn(
         "Deprecated, use aiohttp_server fixture instead",
         DeprecationWarning,
@@ -296,14 +298,14 @@ def test_server(aiohttp_server):  # type: ignore # pragma: no cover


 @pytest.fixture
-def aiohttp_raw_server(loop):  # type: ignore
+def aiohttp_raw_server(loop):  # type: ignore[no-untyped-def]
     """Factory to create a RawTestServer instance, given a web handler.

     aiohttp_raw_server(handler, **kwargs)
     """
     servers = []

-    async def go(handler, *, port=None, **kwargs):  # type: ignore
+    async def go(handler, *, port=None, **kwargs):  # type: ignore[no-untyped-def]
         server = RawTestServer(handler, port=port)
         await server.start_server(loop=loop, **kwargs)
         servers.append(server)
@@ -311,7 +313,7 @@ def aiohttp_raw_server(loop):  # type: ignore

     yield go

-    async def finalize():  # type: ignore
+    async def finalize() -> None:
         while servers:
             await servers.pop().close()

@@ -319,7 +321,9 @@ def aiohttp_raw_server(loop):  # type: ignore


 @pytest.fixture
-def raw_test_server(aiohttp_raw_server):  # type: ignore # pragma: no cover
+def raw_test_server(  # type: ignore[no-untyped-def] # pragma: no cover
+    aiohttp_raw_server,
+):
     warnings.warn(
         "Deprecated, use aiohttp_raw_server fixture instead",
         DeprecationWarning,
@@ -329,7 +333,9 @@ def raw_test_server(aiohttp_raw_server):  # type: ignore # pragma: no cover


 @pytest.fixture
-def aiohttp_client(loop):  # type: ignore
+def aiohttp_client(
+    loop: asyncio.AbstractEventLoop,
+) -> Generator[AiohttpClient, None, None]:
     """Factory to create a TestClient instance.

     aiohttp_client(app, **kwargs)
@@ -338,9 +344,14 @@ def aiohttp_client(loop):  # type: ignore
     """
     clients = []

-    async def go(__param, *args, server_kwargs=None, **kwargs):  # type: ignore
+    async def go(
+        __param: Union[Application, BaseTestServer],
+        *args: Any,
+        server_kwargs: Optional[Dict[str, Any]] = None,
+        **kwargs: Any
+    ) -> TestClient:

-        if isinstance(__param, Callable) and not isinstance(  # type: ignore
+        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
             __param, (Application, BaseTestServer)
         ):
             __param = __param(loop, *args, **kwargs)
@@ -363,7 +374,7 @@ def aiohttp_client(loop):  # type: ignore

     yield go

-    async def finalize():  # type: ignore
+    async def finalize() -> None:
         while clients:
             await clients.pop().close()

@@ -371,7 +382,7 @@ def aiohttp_client(loop):  # type: ignore


 @pytest.fixture
-def test_client(aiohttp_client):  # type: ignore # pragma: no cover
+def test_client(aiohttp_client):  # type: ignore[no-untyped-def] # pragma: no cover
     warnings.warn(
         "Deprecated, use aiohttp_client fixture instead",
         DeprecationWarning,
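A short example of the `aiohttp_client` fixture whose signature is tightened above (now typed as `AiohttpClient`), following the standard usage from the aiohttp testing docs:

from aiohttp import web

async def hello(request: web.Request) -> web.Response:
    return web.Response(text="Hello, world")

async def test_hello(aiohttp_client) -> None:
    app = web.Application()
    app.router.add_get("/", hello)
    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 200
    assert "Hello" in await resp.text()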
@@ -1,6 +1,6 @@
 import asyncio
 import socket
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, Type, Union

 from .abc import AbstractResolver
 from .helpers import get_running_loop
@@ -18,8 +18,10 @@ aiodns_default = False


 class ThreadedResolver(AbstractResolver):
-    """Use Executor for synchronous getaddrinfo() calls, which defaults to
-    concurrent.futures.ThreadPoolExecutor.
+    """Threaded resolver.
+
+    Uses an Executor for synchronous getaddrinfo() calls.
+    concurrent.futures.ThreadPoolExecutor is used by default.
     """

     def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
@@ -38,16 +40,24 @@ class ThreadedResolver(AbstractResolver):

         hosts = []
         for family, _, proto, _, address in infos:
-            if family == socket.AF_INET6 and address[3]:  # type: ignore
-                # This is essential for link-local IPv6 addresses.
-                # LL IPv6 is a VERY rare case. Strictly speaking, we should use
-                # getnameinfo() unconditionally, but performance makes sense.
-                host, _port = socket.getnameinfo(
-                    address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
-                )
-                port = int(_port)
-            else:
-                host, port = address[:2]
+            if family == socket.AF_INET6:
+                if len(address) < 3:
+                    # IPv6 is not supported by Python build,
+                    # or IPv6 is not enabled in the host
+                    continue
+                if address[3]:  # type: ignore[misc]
+                    # This is essential for link-local IPv6 addresses.
+                    # LL IPv6 is a VERY rare case. Strictly speaking, we should use
+                    # getnameinfo() unconditionally, but performance makes sense.
+                    host, _port = socket.getnameinfo(
+                        address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
+                    )
+                    port = int(_port)
+                else:
+                    host, port = address[:2]
+            else:  # IPv4
+                assert family == socket.AF_INET
+                host, port = address  # type: ignore[misc]
             hosts.append(
                 {
                     "hostname": hostname,
@@ -143,7 +153,8 @@ class AsyncResolver(AbstractResolver):
         return hosts

     async def close(self) -> None:
-        return self._resolver.cancel()
+        self._resolver.cancel()


-DefaultResolver = AsyncResolver if aiodns_default else ThreadedResolver
+_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
+DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
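A sketch of the resolver API reworked above; `ThreadedResolver` is importable from `aiohttp.resolver`, and the host name here is purely illustrative:

import asyncio
from aiohttp.resolver import ThreadedResolver

async def main() -> None:
    resolver = ThreadedResolver()
    # Each entry carries hostname/host/port/family/proto/flags keys.
    for entry in await resolver.resolve("example.com", 80):
        print(entry["host"], entry["port"])

asyncio.run(main())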
@@ -1,16 +1,12 @@
 import asyncio
 import collections
 import warnings
-from typing import Awaitable, Callable, Generic, List, Optional, Tuple, TypeVar
+from typing import Awaitable, Callable, Deque, Generic, List, Optional, Tuple, TypeVar

 from .base_protocol import BaseProtocol
 from .helpers import BaseTimerContext, set_exception, set_result
 from .log import internal_logger
-
-try:  # pragma: no cover
-    from typing import Deque
-except ImportError:
-    from typing_extensions import Deque
+from .typedefs import Final

 __all__ = (
     "EMPTY_PAYLOAD",
@@ -60,31 +56,33 @@ class ChunkTupleAsyncStreamIterator:

 class AsyncStreamReaderMixin:
     def __aiter__(self) -> AsyncStreamIterator[bytes]:
-        return AsyncStreamIterator(self.readline)  # type: ignore
+        return AsyncStreamIterator(self.readline)  # type: ignore[attr-defined]

     def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
         """Returns an asynchronous iterator that yields chunks of size n.

         Python-3.5 available for Python 3.5+ only
         """
-        return AsyncStreamIterator(lambda: self.read(n))  # type: ignore
+        return AsyncStreamIterator(
+            lambda: self.read(n)  # type: ignore[attr-defined,no-any-return]
+        )

     def iter_any(self) -> AsyncStreamIterator[bytes]:
-        """Returns an asynchronous iterator that yields all the available
-        data as soon as it is received
+        """Yield all available data as soon as it is received.

         Python-3.5 available for Python 3.5+ only
         """
-        return AsyncStreamIterator(self.readany)  # type: ignore
+        return AsyncStreamIterator(self.readany)  # type: ignore[attr-defined]

     def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
-        """Returns an asynchronous iterator that yields chunks of data
-        as they are received by the server. The yielded objects are tuples
+        """Yield chunks of data as they are received by the server.
+
+        The yielded objects are tuples
         of (bytes, bool) as returned by the StreamReader.readchunk method.

         Python-3.5 available for Python 3.5+ only
         """
-        return ChunkTupleAsyncStreamIterator(self)  # type: ignore
+        return ChunkTupleAsyncStreamIterator(self)  # type: ignore[arg-type]


 class StreamReader(AsyncStreamReaderMixin):
@@ -109,7 +107,7 @@ class StreamReader(AsyncStreamReaderMixin):
         limit: int,
         *,
         timer: Optional[BaseTimerContext] = None,
-        loop: Optional[asyncio.AbstractEventLoop] = None
+        loop: Optional[asyncio.AbstractEventLoop] = None,
     ) -> None:
         self._protocol = protocol
         self._low_water = limit
@@ -119,15 +117,15 @@ class StreamReader(AsyncStreamReaderMixin):
         self._loop = loop
         self._size = 0
         self._cursor = 0
-        self._http_chunk_splits = None  # type: Optional[List[int]]
-        self._buffer = collections.deque()  # type: Deque[bytes]
+        self._http_chunk_splits: Optional[List[int]] = None
+        self._buffer: Deque[bytes] = collections.deque()
         self._buffer_offset = 0
         self._eof = False
-        self._waiter = None  # type: Optional[asyncio.Future[None]]
-        self._eof_waiter = None  # type: Optional[asyncio.Future[None]]
-        self._exception = None  # type: Optional[BaseException]
+        self._waiter: Optional[asyncio.Future[None]] = None
+        self._eof_waiter: Optional[asyncio.Future[None]] = None
+        self._exception: Optional[BaseException] = None
         self._timer = timer
-        self._eof_callbacks = []  # type: List[Callable[[], None]]
+        self._eof_callbacks: List[Callable[[], None]] = []

     def __repr__(self) -> str:
         info = [self.__class__.__name__]
@@ -135,7 +133,7 @@ class StreamReader(AsyncStreamReaderMixin):
             info.append("%d bytes" % self._size)
         if self._eof:
             info.append("eof")
-        if self._low_water != 2 ** 16:  # default limit
+        if self._low_water != 2**16:  # default limit
             info.append("low=%d high=%d" % (self._low_water, self._high_water))
         if self._waiter:
             info.append("w=%r" % self._waiter)
@@ -310,34 +308,41 @@ class StreamReader(AsyncStreamReaderMixin):
             self._waiter = None

     async def readline(self) -> bytes:
+        return await self.readuntil()
+
+    async def readuntil(self, separator: bytes = b"\n") -> bytes:
+        seplen = len(separator)
+        if seplen == 0:
+            raise ValueError("Separator should be at least one-byte string")
+
         if self._exception is not None:
             raise self._exception

-        line = []
-        line_size = 0
+        chunk = b""
+        chunk_size = 0
         not_enough = True

         while not_enough:
             while self._buffer and not_enough:
                 offset = self._buffer_offset
-                ichar = self._buffer[0].find(b"\n", offset) + 1
-                # Read from current offset to found b'\n' or to the end.
+                ichar = self._buffer[0].find(separator, offset) + 1
+                # Read from current offset to found separator or to the end.
                 data = self._read_nowait_chunk(ichar - offset if ichar else -1)
-                line.append(data)
-                line_size += len(data)
+                chunk += data
+                chunk_size += len(data)
                 if ichar:
                     not_enough = False

-                if line_size > self._high_water:
-                    raise ValueError("Line is too long")
+                if chunk_size > self._high_water:
+                    raise ValueError("Chunk too big")

             if self._eof:
                 break

             if not_enough:
-                await self._wait("readline")
+                await self._wait("readuntil")

-        return b"".join(line)
+        return chunk

     async def read(self, n: int = -1) -> bytes:
         if self._exception is not None:
@@ -394,7 +399,9 @@ class StreamReader(AsyncStreamReaderMixin):
         return self._read_nowait(-1)

     async def readchunk(self) -> Tuple[bytes, bool]:
-        """Returns a tuple of (data, end_of_http_chunk). When chunked transfer
+        """Returns a tuple of (data, end_of_http_chunk).
+
+        When chunked transfer
         encoding is used, end_of_http_chunk is a boolean indicating if the end
         of the data corresponds to the end of a HTTP chunk , otherwise it is
         always False.
@@ -429,7 +436,7 @@ class StreamReader(AsyncStreamReaderMixin):
         if self._exception is not None:
             raise self._exception

-        blocks = []  # type: List[bytes]
+        blocks: List[bytes] = []
         while n > 0:
             block = await self.read(n)
             if not block:
@@ -483,7 +490,7 @@ class StreamReader(AsyncStreamReaderMixin):
         return data

     def _read_nowait(self, n: int) -> bytes:
-        """ Read not more than n bytes, or whole buffer if n == -1 """
+        """Read not more than n bytes, or whole buffer if n == -1"""
         chunks = []

         while self._buffer:
@@ -497,7 +504,10 @@ class StreamReader(AsyncStreamReaderMixin):
         return b"".join(chunks) if chunks else b""


-class EmptyStreamReader(AsyncStreamReaderMixin):
+class EmptyStreamReader(StreamReader):  # lgtm [py/missing-call-to-init]
+    def __init__(self) -> None:
+        pass
+
     def exception(self) -> Optional[BaseException]:
         return None

@@ -531,6 +541,8 @@ class EmptyStreamReader(AsyncStreamReaderMixin):
     async def read(self, n: int = -1) -> bytes:
         return b""

+    # TODO add async def readuntil
+
     async def readany(self) -> bytes:
         return b""

@@ -540,11 +552,11 @@ class EmptyStreamReader(AsyncStreamReaderMixin):
     async def readexactly(self, n: int) -> bytes:
         raise asyncio.IncompleteReadError(b"", n)

-    def read_nowait(self) -> bytes:
+    def read_nowait(self, n: int = -1) -> bytes:
         return b""


-EMPTY_PAYLOAD = EmptyStreamReader()
+EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()


 class DataQueue(Generic[_T]):
@@ -553,10 +565,10 @@ class DataQueue(Generic[_T]):
     def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
         self._loop = loop
         self._eof = False
-        self._waiter = None  # type: Optional[asyncio.Future[None]]
-        self._exception = None  # type: Optional[BaseException]
+        self._waiter: Optional[asyncio.Future[None]] = None
+        self._exception: Optional[BaseException] = None
         self._size = 0
-        self._buffer = collections.deque()  # type: Deque[Tuple[_T, int]]
+        self._buffer: Deque[Tuple[_T, int]] = collections.deque()

     def __len__(self) -> int:
         return len(self._buffer)
@@ -623,7 +635,8 @@ class DataQueue(Generic[_T]):
 class FlowControlDataQueue(DataQueue[_T]):
     """FlowControlDataQueue resumes and pauses an underlying stream.

-    It is a destination for parsed data."""
+    It is a destination for parsed data.
+    """

     def __init__(
         self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
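A hedged sketch of the new `readuntil(separator)` API introduced above (`readline()` is now just `readuntil(b"\n")`); feeding the reader by hand assumes the internal `BaseProtocol`/`StreamReader` constructors shown in this diff:

import asyncio
from aiohttp.base_protocol import BaseProtocol
from aiohttp.streams import StreamReader

async def main() -> None:
    loop = asyncio.get_running_loop()
    reader = StreamReader(BaseProtocol(loop=loop), limit=2**16, loop=loop)
    reader.feed_data(b"record one|record two|")
    reader.feed_eof()
    print(await reader.readuntil(b"|"))  # b'record one|'
    print(await reader.readuntil(b"|"))  # b'record two|'

asyncio.run(main())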
@@ -15,7 +15,6 @@ if hasattr(socket, "SO_KEEPALIVE"):
         if sock is not None:
             sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

-
 else:

     def tcp_keepalive(transport: asyncio.Transport) -> None:  # pragma: no cover
@ -2,35 +2,41 @@
|
|||
|
||||
import asyncio
|
||||
import contextlib
|
||||
import functools
|
||||
import gc
|
||||
import inspect
|
||||
import ipaddress
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
import unittest
|
||||
import warnings
|
||||
from abc import ABC, abstractmethod
|
||||
from types import TracebackType
|
||||
from typing import TYPE_CHECKING, Any, Callable, Iterator, List, Optional, Type, Union
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Callable,
|
||||
Iterator,
|
||||
List,
|
||||
Optional,
|
||||
Type,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
from unittest import mock
|
||||
|
||||
from aiosignal import Signal
|
||||
from multidict import CIMultiDict, CIMultiDictProxy
|
||||
from yarl import URL
|
||||
|
||||
import aiohttp
|
||||
from aiohttp.client import (
|
||||
ClientResponse,
|
||||
_RequestContextManager,
|
||||
_WSRequestContextManager,
|
||||
)
|
||||
from aiohttp.client import _RequestContextManager, _WSRequestContextManager
|
||||
|
||||
from . import ClientSession, hdrs
|
||||
from .abc import AbstractCookieJar
|
||||
from .client_reqrep import ClientResponse
|
||||
from .client_ws import ClientWebSocketResponse
|
||||
from .helpers import sentinel
|
||||
from .helpers import PY_38, sentinel
|
||||
from .http import HttpVersion, RawRequestMessage
|
||||
from .signals import Signal
|
||||
from .web import (
|
||||
Application,
|
||||
AppRunner,
|
||||
|
@ -48,16 +54,24 @@ if TYPE_CHECKING: # pragma: no cover
|
|||
else:
|
||||
SSLContext = None
|
||||
|
||||
if PY_38:
|
||||
from unittest import IsolatedAsyncioTestCase as TestCase
|
||||
else:
|
||||
from asynctest import TestCase # type: ignore[no-redef]
|
||||
|
||||
REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
|
||||
|
||||
|
||||
def get_unused_port_socket(host: str) -> socket.socket:
|
||||
return get_port_socket(host, 0)
|
||||
def get_unused_port_socket(
|
||||
host: str, family: socket.AddressFamily = socket.AF_INET
|
||||
) -> socket.socket:
|
||||
return get_port_socket(host, 0, family)
|
||||
|
||||
|
||||
def get_port_socket(host: str, port: int) -> socket.socket:
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
def get_port_socket(
|
||||
host: str, port: int, family: socket.AddressFamily
|
||||
) -> socket.socket:
|
||||
s = socket.socket(family, socket.SOCK_STREAM)
|
||||
if REUSE_ADDRESS:
|
||||
# Windows has different semantics for SO_REUSEADDR,
|
||||
# so don't set it. Ref:
|
||||
|
@ -71,7 +85,7 @@ def unused_port() -> int:
|
|||
"""Return a port that is unused on the current host."""
|
||||
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
|
||||
s.bind(("127.0.0.1", 0))
|
||||
return s.getsockname()[1]
|
||||
return cast(int, s.getsockname()[1])
|
||||
|
||||
|
||||
class BaseTestServer(ABC):
|
||||
|
@ -85,16 +99,20 @@ class BaseTestServer(ABC):
|
|||
host: str = "127.0.0.1",
|
||||
port: Optional[int] = None,
|
||||
skip_url_asserts: bool = False,
|
||||
socket_factory: Callable[
|
||||
[str, int, socket.AddressFamily], socket.socket
|
||||
] = get_port_socket,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
self._loop = loop
|
||||
self.runner = None # type: Optional[BaseRunner]
|
||||
self._root = None # type: Optional[URL]
|
||||
self.runner: Optional[BaseRunner] = None
|
||||
self._root: Optional[URL] = None
|
||||
self.host = host
|
||||
self.port = port
|
||||
self._closed = False
|
||||
self.scheme = scheme
|
||||
self.skip_url_asserts = skip_url_asserts
|
||||
self.socket_factory = socket_factory
|
||||
|
||||
async def start_server(
|
||||
self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
|
||||
|
@ -107,7 +125,12 @@ class BaseTestServer(ABC):
|
|||
await self.runner.setup()
|
||||
if not self.port:
|
||||
self.port = 0
|
||||
_sock = get_port_socket(self.host, self.port)
|
||||
try:
|
||||
version = ipaddress.ip_address(self.host).version
|
||||
except ValueError:
|
||||
version = 4
|
||||
family = socket.AF_INET6 if version == 6 else socket.AF_INET
|
||||
_sock = self.socket_factory(self.host, self.port, family)
|
||||
self.host, self.port = _sock.getsockname()[:2]
|
||||
site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
|
||||
await site.start()
|
||||
|
@ -261,8 +284,8 @@ class TestClient:
|
|||
cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
|
||||
self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
|
||||
self._closed = False
|
||||
self._responses = [] # type: List[ClientResponse]
|
||||
self._websockets = [] # type: List[ClientWebSocketResponse]
|
||||
self._responses: List[ClientResponse] = []
|
||||
self._websockets: List[ClientWebSocketResponse] = []
|
||||
|
||||
async def start_server(self) -> None:
|
||||
await self._server.start_server(loop=self._loop)
|
||||
|
@ -280,8 +303,8 @@ class TestClient:
|
|||
return self._server
|
||||
|
||||
@property
|
||||
def app(self) -> Application:
|
||||
return getattr(self._server, "app", None)
|
||||
def app(self) -> Optional[Application]:
|
||||
return cast(Optional[Application], getattr(self._server, "app", None))
|
||||
|
||||
@property
|
||||
def session(self) -> ClientSession:
|
||||
|
@ -400,9 +423,8 @@ class TestClient:
|
|||
await self.close()
|
||||
|
||||
|
||||
class AioHTTPTestCase(unittest.TestCase):
|
||||
"""A base class to allow for unittest web applications using
|
||||
aiohttp.
|
||||
class AioHTTPTestCase(TestCase):
|
||||
"""A base class to allow for unittest web applications using aiohttp.
|
||||
|
||||
Provides the following:
|
||||
|
||||
|
@ -417,43 +439,49 @@ class AioHTTPTestCase(unittest.TestCase):
|
|||
"""
|
||||
|
||||
async def get_application(self) -> Application:
|
||||
"""
|
||||
"""Get application.
|
||||
|
||||
This method should be overridden
|
||||
to return the aiohttp.web.Application
|
||||
object to test.
|
||||
|
||||
"""
|
||||
return self.get_app()
|
||||
|
||||
def get_app(self) -> Application:
|
||||
"""Obsolete method used to constructing web application.
|
||||
|
||||
Use .get_application() coroutine instead
|
||||
|
||||
Use .get_application() coroutine instead.
|
||||
"""
|
||||
raise RuntimeError("Did you forget to define get_application()?")
|
||||
|
||||
def setUp(self) -> None:
|
||||
self.loop = setup_test_loop()
|
||||
if not PY_38:
|
||||
asyncio.get_event_loop().run_until_complete(self.asyncSetUp())
|
||||
|
||||
self.app = self.loop.run_until_complete(self.get_application())
|
||||
self.server = self.loop.run_until_complete(self.get_server(self.app))
|
||||
self.client = self.loop.run_until_complete(self.get_client(self.server))
|
||||
async def asyncSetUp(self) -> None:
|
||||
try:
|
||||
self.loop = asyncio.get_running_loop()
|
||||
except (AttributeError, RuntimeError): # AttributeError->py36
|
||||
self.loop = asyncio.get_event_loop_policy().get_event_loop()
|
||||
|
||||
self.loop.run_until_complete(self.client.start_server())
|
||||
|
||||
self.loop.run_until_complete(self.setUpAsync())
|
||||
return await self.setUpAsync()
|
||||
|
||||
async def setUpAsync(self) -> None:
|
||||
pass
|
||||
self.app = await self.get_application()
|
||||
self.server = await self.get_server(self.app)
|
||||
self.client = await self.get_client(self.server)
|
||||
|
||||
await self.client.start_server()
|
||||
|
||||
def tearDown(self) -> None:
|
||||
self.loop.run_until_complete(self.tearDownAsync())
|
||||
            self.loop.run_until_complete(self.client.close())
            teardown_test_loop(self.loop)
        if not PY_38:
            self.loop.run_until_complete(self.asyncTearDown())

    async def asyncTearDown(self) -> None:
        return await self.tearDownAsync()

    async def tearDownAsync(self) -> None:
        pass
        await self.client.close()

    async def get_server(self, app: Application) -> TestServer:
        """Return a TestServer instance."""

@@ -465,18 +493,17 @@ class AioHTTPTestCase(unittest.TestCase):


def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
    """A decorator dedicated to use with asynchronous methods of an
    AioHTTPTestCase.

    Handles executing an asynchronous function, using
    the self.loop of the AioHTTPTestCase.
    """
    A decorator dedicated to use with asynchronous AioHTTPTestCase test methods.

    @functools.wraps(func, *args, **kwargs)
    def new_func(self: Any, *inner_args: Any, **inner_kwargs: Any) -> Any:
        return self.loop.run_until_complete(func(self, *inner_args, **inner_kwargs))

    return new_func
    In 3.8+, this does nothing.
    """
    warnings.warn(
        "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+",
        DeprecationWarning,
        stacklevel=2,
    )
    return func


_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]

@@ -498,8 +525,7 @@ def loop_context(
def setup_test_loop(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
) -> asyncio.AbstractEventLoop:
    """Create and return an asyncio.BaseEventLoop
    instance.
    """Create and return an asyncio.BaseEventLoop instance.

    The caller should also call teardown_test_loop,
    once they are done with the loop.

@@ -514,7 +540,16 @@ def setup_test_loop(
    asyncio.set_event_loop(loop)
    if sys.platform != "win32" and not skip_watcher:
        policy = asyncio.get_event_loop_policy()
        watcher = asyncio.SafeChildWatcher()
        watcher: asyncio.AbstractChildWatcher
        try:  # Python >= 3.8
            # Refs:
            # * https://github.com/pytest-dev/pytest-xdist/issues/620
            # * https://stackoverflow.com/a/58614689/595220
            # * https://bugs.python.org/issue35621
            # * https://github.com/python/cpython/pull/14344
            watcher = asyncio.ThreadedChildWatcher()
        except AttributeError:  # Python < 3.8
            watcher = asyncio.SafeChildWatcher()
        watcher.attach_loop(loop)
        with contextlib.suppress(NotImplementedError):
            policy.set_child_watcher(watcher)

@@ -522,10 +557,7 @@ def setup_test_loop(


def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
    """Teardown and cleanup an event_loop created
    by setup_test_loop.

    """
    """Teardown and cleanup an event_loop created by setup_test_loop."""
    closed = loop.is_closed()
    if not closed:
        loop.call_soon(loop.stop)

@@ -545,7 +577,7 @@ def _create_app_mock() -> mock.MagicMock:
    def set_dict(app: Any, key: str, value: Any) -> None:
        app.__app_dict[key] = value

    app = mock.MagicMock()
    app = mock.MagicMock(spec=Application)
    app.__app_dict = {}
    app.__getitem__ = get_dict
    app.__setitem__ = set_dict

@@ -583,16 +615,14 @@ def make_mocked_request(
    transport: Any = sentinel,
    payload: Any = sentinel,
    sslcontext: Optional[SSLContext] = None,
    client_max_size: int = 1024 ** 2,
    client_max_size: int = 1024**2,
    loop: Any = ...,
) -> Request:
    """Creates mocked web.Request testing purposes.

    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.

    """
    task = mock.Mock()
    if loop is ...:
        loop = mock.Mock()

@@ -619,7 +649,7 @@ def make_mocked_request(
        headers,
        raw_hdrs,
        closing,
        False,
        None,
        False,
        chunked,
        URL(path),
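Context for the `unittest_run_loop` change above: on aiohttp 3.8+ the decorator is a deprecated no-op, and `AioHTTPTestCase` drives async test methods itself. A minimal usage sketch under that assumption; the app and handler names are illustrative, not part of the vendored patch:

from aiohttp import web
from aiohttp.test_utils import AioHTTPTestCase


class HelloAppTestCase(AioHTTPTestCase):
    async def get_application(self) -> web.Application:
        # Build the app under test; the base class creates the test client/server.
        async def hello(request: web.Request) -> web.Response:
            return web.Response(text="hello")

        app = web.Application()
        app.router.add_get("/", hello)
        return app

    async def test_hello(self) -> None:
        # No @unittest_run_loop needed on 3.8+; it is now a deprecated no-op.
        async with self.client.get("/") as resp:
            assert resp.status == 200
            assert await resp.text() == "hello"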
@@ -2,16 +2,15 @@ from types import SimpleNamespace
from typing import TYPE_CHECKING, Awaitable, Optional, Type, TypeVar

import attr
from aiosignal import Signal
from multidict import CIMultiDict
from yarl import URL

from .client_reqrep import ClientResponse
from .signals import Signal

if TYPE_CHECKING:  # pragma: no cover
    from typing_extensions import Protocol

    from .client import ClientSession
    from .typedefs import Protocol

_ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)


@@ -42,68 +41,71 @@ __all__ = (
    "TraceRequestRedirectParams",
    "TraceRequestChunkSentParams",
    "TraceResponseChunkReceivedParams",
    "TraceRequestHeadersSentParams",
)


class TraceConfig:
    """First-class used to trace requests launched via ClientSession
    objects."""
    """First-class used to trace requests launched via ClientSession objects."""

    def __init__(
        self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
    ) -> None:
        self._on_request_start = Signal(
        self._on_request_start: Signal[
            _SignalCallback[TraceRequestStartParams]
        ] = Signal(self)
        self._on_request_chunk_sent: Signal[
            _SignalCallback[TraceRequestChunkSentParams]
        ] = Signal(self)
        self._on_response_chunk_received: Signal[
            _SignalCallback[TraceResponseChunkReceivedParams]
        ] = Signal(self)
        self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceRequestStartParams]]
        self._on_request_chunk_sent = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceRequestChunkSentParams]]
        self._on_response_chunk_received = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceResponseChunkReceivedParams]]
        self._on_request_end = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceRequestEndParams]]
        self._on_request_exception = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceRequestExceptionParams]]
        self._on_request_redirect = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceRequestRedirectParams]]
        self._on_connection_queued_start = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceConnectionQueuedStartParams]]
        self._on_connection_queued_end = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceConnectionQueuedEndParams]]
        self._on_connection_create_start = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceConnectionCreateStartParams]]
        self._on_connection_create_end = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceConnectionCreateEndParams]]
        self._on_connection_reuseconn = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceConnectionReuseconnParams]]
        self._on_dns_resolvehost_start = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceDnsResolveHostStartParams]]
        self._on_dns_resolvehost_end = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceDnsResolveHostEndParams]]
        self._on_dns_cache_hit = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceDnsCacheHitParams]]
        self._on_dns_cache_miss = Signal(
            self
        )  # type: Signal[_SignalCallback[TraceDnsCacheMissParams]]
        )
        self._on_request_exception: Signal[
            _SignalCallback[TraceRequestExceptionParams]
        ] = Signal(self)
        self._on_request_redirect: Signal[
            _SignalCallback[TraceRequestRedirectParams]
        ] = Signal(self)
        self._on_connection_queued_start: Signal[
            _SignalCallback[TraceConnectionQueuedStartParams]
        ] = Signal(self)
        self._on_connection_queued_end: Signal[
            _SignalCallback[TraceConnectionQueuedEndParams]
        ] = Signal(self)
        self._on_connection_create_start: Signal[
            _SignalCallback[TraceConnectionCreateStartParams]
        ] = Signal(self)
        self._on_connection_create_end: Signal[
            _SignalCallback[TraceConnectionCreateEndParams]
        ] = Signal(self)
        self._on_connection_reuseconn: Signal[
            _SignalCallback[TraceConnectionReuseconnParams]
        ] = Signal(self)
        self._on_dns_resolvehost_start: Signal[
            _SignalCallback[TraceDnsResolveHostStartParams]
        ] = Signal(self)
        self._on_dns_resolvehost_end: Signal[
            _SignalCallback[TraceDnsResolveHostEndParams]
        ] = Signal(self)
        self._on_dns_cache_hit: Signal[
            _SignalCallback[TraceDnsCacheHitParams]
        ] = Signal(self)
        self._on_dns_cache_miss: Signal[
            _SignalCallback[TraceDnsCacheMissParams]
        ] = Signal(self)
        self._on_request_headers_sent: Signal[
            _SignalCallback[TraceRequestHeadersSentParams]
        ] = Signal(self)

        self._trace_config_ctx_factory = trace_config_ctx_factory

    def trace_config_ctx(
        self, trace_request_ctx: Optional[SimpleNamespace] = None
    ) -> SimpleNamespace:
        """ Return a new trace_config_ctx instance """
        """Return a new trace_config_ctx instance"""
        return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx)

    def freeze(self) -> None:

@@ -122,6 +124,7 @@ class TraceConfig:
        self._on_dns_resolvehost_end.freeze()
        self._on_dns_cache_hit.freeze()
        self._on_dns_cache_miss.freeze()
        self._on_request_headers_sent.freeze()

    @property
    def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]":

@@ -205,10 +208,16 @@ class TraceConfig:
    def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]":
        return self._on_dns_cache_miss

    @property
    def on_request_headers_sent(
        self,
    ) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]":
        return self._on_request_headers_sent


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestStartParams:
    """ Parameters sent by the `on_request_start` signal"""
    """Parameters sent by the `on_request_start` signal"""

    method: str
    url: URL

@@ -217,7 +226,7 @@ class TraceRequestStartParams:

@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestChunkSentParams:
    """ Parameters sent by the `on_request_chunk_sent` signal"""
    """Parameters sent by the `on_request_chunk_sent` signal"""

    method: str
    url: URL

@@ -226,7 +235,7 @@ class TraceRequestChunkSentParams:

@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceResponseChunkReceivedParams:
    """ Parameters sent by the `on_response_chunk_received` signal"""
    """Parameters sent by the `on_response_chunk_received` signal"""

    method: str
    url: URL

@@ -235,7 +244,7 @@ class TraceResponseChunkReceivedParams:

@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestEndParams:
    """ Parameters sent by the `on_request_end` signal"""
    """Parameters sent by the `on_request_end` signal"""

    method: str
    url: URL

@@ -245,7 +254,7 @@ class TraceRequestEndParams:

@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestExceptionParams:
    """ Parameters sent by the `on_request_exception` signal"""
    """Parameters sent by the `on_request_exception` signal"""

    method: str
    url: URL

@@ -255,7 +264,7 @@ class TraceRequestExceptionParams:

@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestRedirectParams:
    """ Parameters sent by the `on_request_redirect` signal"""
    """Parameters sent by the `on_request_redirect` signal"""

    method: str
    url: URL

@@ -265,60 +274,72 @@ class TraceRequestRedirectParams:

@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionQueuedStartParams:
    """ Parameters sent by the `on_connection_queued_start` signal"""
    """Parameters sent by the `on_connection_queued_start` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionQueuedEndParams:
    """ Parameters sent by the `on_connection_queued_end` signal"""
    """Parameters sent by the `on_connection_queued_end` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateStartParams:
    """ Parameters sent by the `on_connection_create_start` signal"""
    """Parameters sent by the `on_connection_create_start` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateEndParams:
    """ Parameters sent by the `on_connection_create_end` signal"""
    """Parameters sent by the `on_connection_create_end` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionReuseconnParams:
    """ Parameters sent by the `on_connection_reuseconn` signal"""
    """Parameters sent by the `on_connection_reuseconn` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostStartParams:
    """ Parameters sent by the `on_dns_resolvehost_start` signal"""
    """Parameters sent by the `on_dns_resolvehost_start` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostEndParams:
    """ Parameters sent by the `on_dns_resolvehost_end` signal"""
    """Parameters sent by the `on_dns_resolvehost_end` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheHitParams:
    """ Parameters sent by the `on_dns_cache_hit` signal"""
    """Parameters sent by the `on_dns_cache_hit` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheMissParams:
    """ Parameters sent by the `on_dns_cache_miss` signal"""
    """Parameters sent by the `on_dns_cache_miss` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestHeadersSentParams:
    """Parameters sent by the `on_request_headers_sent` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"


class Trace:
    """Internal class used to keep together the main dependencies used
    at the moment of send a signal."""
    """Internal dependency holder class.

    Used to keep together the main dependencies used
    at the moment of send a signal.
    """

    def __init__(
        self,

@@ -440,3 +461,12 @@ class Trace:
        return await self._trace_config.on_dns_cache_miss.send(
            self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host)
        )

    async def send_request_headers(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        return await self._trace_config._on_request_headers_sent.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestHeadersSentParams(method, url, headers),
        )
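For reference, the signals typed above are consumed through a `TraceConfig` attached to a `ClientSession`; every listener receives `(session, trace_config_ctx, params)`. A minimal sketch, assuming a plain GET against an illustrative URL:

import asyncio

import aiohttp


async def on_request_start(session, ctx, params) -> None:
    # params is a TraceRequestStartParams with method, url and headers.
    print("starting", params.method, params.url)


async def main() -> None:
    trace_config = aiohttp.TraceConfig()
    trace_config.on_request_start.append(on_request_start)
    async with aiohttp.ClientSession(trace_configs=[trace_config]) as session:
        async with session.get("https://example.com/") as resp:
            await resp.read()


asyncio.run(main())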
@@ -1,12 +1,30 @@
import json
import os
import pathlib
import sys
from typing import TYPE_CHECKING, Any, Callable, Iterable, Mapping, Tuple, Union
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Iterable,
    Mapping,
    Tuple,
    Union,
)

from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
from yarl import URL

# These are for other modules to use (to avoid repeating the conditional import).
if sys.version_info >= (3, 8):
    from typing import Final as Final, Protocol as Protocol, TypedDict as TypedDict
else:
    from typing_extensions import (  # noqa: F401
        Final,
        Protocol as Protocol,
        TypedDict as TypedDict,
    )

DEFAULT_JSON_ENCODER = json.dumps
DEFAULT_JSON_DECODER = json.loads


@@ -16,6 +34,8 @@ if TYPE_CHECKING:  # pragma: no cover
    _MultiDict = MultiDict[str]
    _MultiDictProxy = MultiDictProxy[str]
    from http.cookies import BaseCookie, Morsel

    from .web import Request, StreamResponse
else:
    _CIMultiDict = CIMultiDict
    _CIMultiDictProxy = CIMultiDictProxy

@@ -39,8 +59,6 @@ LooseCookies = Union[
    "BaseCookie[str]",
]

Handler = Callable[["Request"], Awaitable["StreamResponse"]]

if sys.version_info >= (3, 6):
    PathLike = Union[str, "os.PathLike[str]"]
else:
    PathLike = Union[str, pathlib.PurePath]
PathLike = Union[str, "os.PathLike[str]"]
@@ -6,16 +6,16 @@ from argparse import ArgumentParser
from collections.abc import Iterable
from importlib import import_module
from typing import (
    Any as Any,
    Awaitable as Awaitable,
    Callable as Callable,
    Any,
    Awaitable,
    Callable,
    Iterable as TypingIterable,
    List as List,
    Optional as Optional,
    Set as Set,
    Type as Type,
    Union as Union,
    cast as cast,
    List,
    Optional,
    Set,
    Type,
    Union,
    cast,
)

from .abc import AbstractAccessLogger

@@ -136,6 +136,7 @@ from .web_urldispatcher import (
    AbstractRoute as AbstractRoute,
    DynamicResource as DynamicResource,
    PlainResource as PlainResource,
    PrefixedSubAppResource as PrefixedSubAppResource,
    Resource as Resource,
    ResourceRoute as ResourceRoute,
    StaticResource as StaticResource,

@@ -261,6 +262,7 @@ __all__ = (
    "AbstractRoute",
    "DynamicResource",
    "PlainResource",
    "PrefixedSubAppResource",
    "Resource",
    "ResourceRoute",
    "StaticResource",

@@ -279,7 +281,7 @@ __all__ = (
try:
    from ssl import SSLContext
except ImportError:  # pragma: no cover
    SSLContext = Any  # type: ignore
    SSLContext = Any  # type: ignore[misc,assignment]

HostSequence = TypingIterable[str]


@@ -290,8 +292,9 @@ async def _run_app(
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Optional[str] = None,
    sock: Optional[socket.socket] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Callable[..., None] = print,
    backlog: int = 128,

@@ -304,7 +307,7 @@ async def _run_app(
) -> None:
    # An internal function to actually do all dirty job for application running
    if asyncio.iscoroutine(app):
        app = await app  # type: ignore
        app = await app  # type: ignore[misc]

    app = cast(Application, app)


@@ -314,11 +317,12 @@ async def _run_app(
        access_log_class=access_log_class,
        access_log_format=access_log_format,
        access_log=access_log,
        keepalive_timeout=keepalive_timeout,
    )

    await runner.setup()

    sites = []  # type: List[BaseSite]
    sites: List[BaseSite] = []

    try:
        if host is not None:

@@ -440,9 +444,7 @@ def _cancel_tasks(
    for task in to_cancel:
        task.cancel()

    loop.run_until_complete(
        asyncio.gather(*to_cancel, loop=loop, return_exceptions=True)
    )
    loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))

    for task in to_cancel:
        if task.cancelled():

@@ -463,8 +465,9 @@ def run_app(
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Optional[str] = None,
    sock: Optional[socket.socket] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Callable[..., None] = print,
    backlog: int = 128,

@@ -474,9 +477,11 @@ def run_app(
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
    """Run an app locally"""
    loop = asyncio.get_event_loop()
    if loop is None:
        loop = asyncio.new_event_loop()

    # Configure if and only if in debugging mode and using the default logger
    if loop.get_debug() and access_log and access_log.name == "aiohttp.access":

@@ -485,34 +490,36 @@ def run_app(
        if not access_log.hasHandlers():
            access_log.addHandler(logging.StreamHandler())

    try:
        main_task = loop.create_task(
            _run_app(
                app,
                host=host,
                port=port,
                path=path,
                sock=sock,
                shutdown_timeout=shutdown_timeout,
                ssl_context=ssl_context,
                print=print,
                backlog=backlog,
                access_log_class=access_log_class,
                access_log_format=access_log_format,
                access_log=access_log,
                handle_signals=handle_signals,
                reuse_address=reuse_address,
                reuse_port=reuse_port,
            )
    main_task = loop.create_task(
        _run_app(
            app,
            host=host,
            port=port,
            path=path,
            sock=sock,
            shutdown_timeout=shutdown_timeout,
            keepalive_timeout=keepalive_timeout,
            ssl_context=ssl_context,
            print=print,
            backlog=backlog,
            access_log_class=access_log_class,
            access_log_format=access_log_format,
            access_log=access_log,
            handle_signals=handle_signals,
            reuse_address=reuse_address,
            reuse_port=reuse_port,
        )
    )

    try:
        asyncio.set_event_loop(loop)
        loop.run_until_complete(main_task)
    except (GracefulExit, KeyboardInterrupt):  # pragma: no cover
        pass
    finally:
        _cancel_tasks({main_task}, loop)
        _cancel_tasks(all_tasks(loop), loop)
        if sys.version_info >= (3, 6):  # don't use PY_36 to pass mypy
            loop.run_until_complete(loop.shutdown_asyncgens())
        loop.run_until_complete(loop.shutdown_asyncgens())
        loop.close()
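The `run_app` hunks add `keepalive_timeout` and an optional caller-supplied `loop`; when `loop` is omitted the function now creates a fresh event loop rather than grabbing the current one. A minimal sketch of the 3.8+ call shape; the handler is illustrative:

import asyncio

from aiohttp import web


async def hello(request: web.Request) -> web.Response:
    return web.Response(text="hello")


app = web.Application()
app.router.add_get("/", hello)

# run_app accepts a pre-built loop and a keep-alive timeout in 3.8+.
loop = asyncio.new_event_loop()
web.run_app(app, loop=loop, keepalive_timeout=75.0)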
@@ -22,6 +22,9 @@ from typing import (
    cast,
)

from aiosignal import Signal
from frozenlist import FrozenList

from . import hdrs
from .abc import (
    AbstractAccessLogger,

@@ -29,11 +32,9 @@ from .abc import (
    AbstractRouter,
    AbstractStreamWriter,
)
from .frozenlist import FrozenList
from .helpers import DEBUG
from .http_parser import RawRequestMessage
from .log import web_logger
from .signals import Signal
from .streams import StreamReader
from .web_log import AccessLogger
from .web_middlewares import _fix_request_current_app

@@ -56,12 +57,13 @@ __all__ = ("Application", "CleanupError")


if TYPE_CHECKING:  # pragma: no cover
    from .typedefs import Handler

    _AppSignal = Signal[Callable[["Application"], Awaitable[None]]]
    _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]]
    _Handler = Callable[[Request], Awaitable[StreamResponse]]
    _Middleware = Union[
        Callable[[Request, _Handler], Awaitable[StreamResponse]],
        Callable[["Application", _Handler], Awaitable[_Handler]],  # old-style
        Callable[[Request, Handler], Awaitable[StreamResponse]],
        Callable[["Application", Handler], Awaitable[Handler]],  # old-style
    ]
    _Middlewares = FrozenList[_Middleware]
    _MiddlewaresHandlers = Optional[Sequence[Tuple[_Middleware, bool]]]

@@ -70,7 +72,6 @@ else:
    # No type checker mode, skip types
    _AppSignal = Signal
    _RespPrepareSignal = Signal
    _Handler = Callable
    _Middleware = Callable
    _Middlewares = FrozenList
    _MiddlewaresHandlers = Optional[Sequence]

@@ -108,7 +109,7 @@ class Application(MutableMapping[str, Any]):
        router: Optional[UrlDispatcher] = None,
        middlewares: Iterable[_Middleware] = (),
        handler_args: Optional[Mapping[str, Any]] = None,
        client_max_size: int = 1024 ** 2,
        client_max_size: int = 1024**2,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        debug: Any = ...,  # mypy doesn't support ellipsis
    ) -> None:

@@ -130,27 +131,27 @@ class Application(MutableMapping[str, Any]):
                "debug argument is deprecated", DeprecationWarning, stacklevel=2
            )
        self._debug = debug
        self._router = router  # type: UrlDispatcher
        self._router: UrlDispatcher = router
        self._loop = loop
        self._handler_args = handler_args
        self.logger = logger

        self._middlewares = FrozenList(middlewares)  # type: _Middlewares
        self._middlewares: _Middlewares = FrozenList(middlewares)

        # initialized on freezing
        self._middlewares_handlers = None  # type: _MiddlewaresHandlers
        self._middlewares_handlers: _MiddlewaresHandlers = None
        # initialized on freezing
        self._run_middlewares = None  # type: Optional[bool]
        self._run_middlewares: Optional[bool] = None

        self._state = {}  # type: Dict[str, Any]
        self._state: Dict[str, Any] = {}
        self._frozen = False
        self._pre_frozen = False
        self._subapps = []  # type: _Subapps
        self._subapps: _Subapps = []

        self._on_response_prepare = Signal(self)  # type: _RespPrepareSignal
        self._on_startup = Signal(self)  # type: _AppSignal
        self._on_shutdown = Signal(self)  # type: _AppSignal
        self._on_cleanup = Signal(self)  # type: _AppSignal
        self._on_response_prepare: _RespPrepareSignal = Signal(self)
        self._on_startup: _AppSignal = Signal(self)
        self._on_shutdown: _AppSignal = Signal(self)
        self._on_cleanup: _AppSignal = Signal(self)
        self._cleanup_ctx = CleanupContext()
        self._on_startup.append(self._cleanup_ctx._on_startup)
        self._on_cleanup.append(self._cleanup_ctx._on_cleanup)

@@ -278,7 +279,7 @@ class Application(MutableMapping[str, Any]):
    @property
    def debug(self) -> bool:
        warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2)
        return self._debug
        return self._debug  # type: ignore[no-any-return]

    def _reg_subapp_signals(self, subapp: "Application") -> None:
        def reg_handler(signame: str) -> None:

@@ -323,7 +324,7 @@ class Application(MutableMapping[str, Any]):
        if not isinstance(domain, str):
            raise TypeError("Domain must be str")
        elif "*" in domain:
            rule = MaskDomain(domain)  # type: Domain
            rule: Domain = MaskDomain(domain)
        else:
            rule = Domain(domain)
        factory = partial(MatchedSubAppResource, rule, subapp)

@@ -384,7 +385,7 @@ class Application(MutableMapping[str, Any]):
                kwargs[k] = v

        return Server(
            self._handle,  # type: ignore
            self._handle,  # type: ignore[arg-type]
            request_factory=self._make_request,
            loop=self._loop,
            **kwargs,

@@ -427,7 +428,11 @@ class Application(MutableMapping[str, Any]):

        Should be called after shutdown()
        """
        await self.on_cleanup.send(self)
        if self.on_cleanup.frozen:
            await self.on_cleanup.send(self)
        else:
            # If an exception occurs in startup, ensure cleanup contexts are completed.
            await self._cleanup_ctx._on_cleanup(self)

    def _make_request(
        self,

@@ -477,7 +482,7 @@ class Application(MutableMapping[str, Any]):
        match_info.freeze()

        resp = None
        request._match_info = match_info  # type: ignore
        request._match_info = match_info
        expect = request.headers.get(hdrs.EXPECT)
        if expect:
            resp = await match_info.expect_handler(request)

@@ -488,13 +493,13 @@ class Application(MutableMapping[str, Any]):

            if self._run_middlewares:
                for app in match_info.apps[::-1]:
                    for m, new_style in app._middlewares_handlers:  # type: ignore
                    for m, new_style in app._middlewares_handlers:  # type: ignore[union-attr] # noqa
                        if new_style:
                            handler = update_wrapper(
                                partial(m, handler=handler), handler
                            )
                        else:
                            handler = await m(app, handler)  # type: ignore
                            handler = await m(app, handler)  # type: ignore[arg-type]

            resp = await handler(request)


@@ -505,7 +510,7 @@ class Application(MutableMapping[str, Any]):
        return self

    def __repr__(self) -> str:
        return "<Application 0x{:x}>".format(id(self))
        return f"<Application 0x{id(self):x}>"

    def __bool__(self) -> bool:
        return True

@@ -514,7 +519,7 @@ class Application(MutableMapping[str, Any]):
class CleanupError(RuntimeError):
    @property
    def exceptions(self) -> List[BaseException]:
        return self.args[1]
        return cast(List[BaseException], self.args[1])


if TYPE_CHECKING:  # pragma: no cover

@@ -526,7 +531,7 @@ else:
class CleanupContext(_CleanupContextBase):
    def __init__(self) -> None:
        super().__init__()
        self._exits = []  # type: List[AsyncIterator[None]]
        self._exits: List[AsyncIterator[None]] = []

    async def _on_startup(self, app: Application) -> None:
        for cb in self:
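The revised `cleanup()` above exists so cleanup contexts still complete when startup fails before the signals freeze. For reference, this is the `cleanup_ctx` pattern it protects; the stored session name is illustrative:

from typing import AsyncIterator

import aiohttp
from aiohttp import web


async def persistent_session(app: web.Application) -> AsyncIterator[None]:
    # Code before the yield runs on startup; code after it runs on cleanup.
    app["client_session"] = aiohttp.ClientSession()
    yield
    await app["client_session"].close()


app = web.Application()
app.cleanup_ctx.append(persistent_session)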
@@ -273,7 +273,7 @@ class HTTPMethodNotAllowed(HTTPClientError):
            content_type=content_type,
        )
        self.headers["Allow"] = allow
        self.allowed_methods = set(allowed_methods)  # type: Set[str]
        self.allowed_methods: Set[str] = set(allowed_methods)
        self.method = method.upper()
@@ -9,15 +9,18 @@ from typing import (  # noqa
    Any,
    Awaitable,
    Callable,
    Iterator,
    List,
    Optional,
    Tuple,
    Union,
    cast,
)

from . import hdrs
from .abc import AbstractStreamWriter
from .typedefs import LooseHeaders
from .helpers import ETAG_ANY, ETag
from .typedefs import Final, LooseHeaders
from .web_exceptions import (
    HTTPNotModified,
    HTTPPartialContent,

@@ -35,7 +38,7 @@ if TYPE_CHECKING:  # pragma: no cover
    _T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]


NOSENDFILE = bool(os.environ.get("AIOHTTP_NOSENDFILE"))
NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE"))


class FileResponse(StreamResponse):

@@ -100,6 +103,30 @@ class FileResponse(StreamResponse):
            await super().write_eof()
        return writer

    @staticmethod
    def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool:
        if len(etags) == 1 and etags[0].value == ETAG_ANY:
            return True
        return any(etag.value == etag_value for etag in etags if not etag.is_weak)

    async def _not_modified(
        self, request: "BaseRequest", etag_value: str, last_modified: float
    ) -> Optional[AbstractStreamWriter]:
        self.set_status(HTTPNotModified.status_code)
        self._length_check = False
        self.etag = etag_value  # type: ignore[assignment]
        self.last_modified = last_modified  # type: ignore[assignment]
        # Delete any Content-Length headers provided by user. HTTP 304
        # should always have empty response body
        return await super().prepare(request)

    async def _precondition_failed(
        self, request: "BaseRequest"
    ) -> Optional[AbstractStreamWriter]:
        self.set_status(HTTPPreconditionFailed.status_code)
        self.content_length = 0
        return await super().prepare(request)

    async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
        filepath = self._path


@@ -112,20 +139,35 @@ class FileResponse(StreamResponse):
            gzip = True

        loop = asyncio.get_event_loop()
        st = await loop.run_in_executor(None, filepath.stat)
        st: os.stat_result = await loop.run_in_executor(None, filepath.stat)

        modsince = request.if_modified_since
        if modsince is not None and st.st_mtime <= modsince.timestamp():
            self.set_status(HTTPNotModified.status_code)
            self._length_check = False
            # Delete any Content-Length headers provided by user. HTTP 304
            # should always have empty response body
            return await super().prepare(request)
        etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
        last_modified = st.st_mtime

        # https://tools.ietf.org/html/rfc7232#section-6
        ifmatch = request.if_match
        if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch):
            return await self._precondition_failed(request)

        unmodsince = request.if_unmodified_since
        if unmodsince is not None and st.st_mtime > unmodsince.timestamp():
            self.set_status(HTTPPreconditionFailed.status_code)
            return await super().prepare(request)
        if (
            unmodsince is not None
            and ifmatch is None
            and st.st_mtime > unmodsince.timestamp()
        ):
            return await self._precondition_failed(request)

        ifnonematch = request.if_none_match
        if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch):
            return await self._not_modified(request, etag_value, last_modified)

        modsince = request.if_modified_since
        if (
            modsince is not None
            and ifnonematch is None
            and st.st_mtime <= modsince.timestamp()
        ):
            return await self._not_modified(request, etag_value, last_modified)

        if hdrs.CONTENT_TYPE not in self.headers:
            ct, encoding = mimetypes.guess_type(str(filepath))

@@ -211,12 +253,14 @@ class FileResponse(StreamResponse):
        self.set_status(status)

        if should_set_ct:
            self.content_type = ct  # type: ignore
            self.content_type = ct  # type: ignore[assignment]
        if encoding:
            self.headers[hdrs.CONTENT_ENCODING] = encoding
        if gzip:
            self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING
        self.last_modified = st.st_mtime  # type: ignore

        self.etag = etag_value  # type: ignore[assignment]
        self.last_modified = st.st_mtime  # type: ignore[assignment]
        self.content_length = count

        self.headers[hdrs.ACCEPT_RANGES] = "bytes"

@@ -228,7 +272,8 @@ class FileResponse(StreamResponse):
                real_start, real_start + count - 1, file_size
            )

        if request.method == hdrs.METH_HEAD or self.status in [204, 304]:
        # If we are sending 0 bytes calling sendfile() will throw a ValueError
        if count == 0 or request.method == hdrs.METH_HEAD or self.status in [204, 304]:
            return await super().prepare(request)

        fobj = await loop.run_in_executor(None, filepath.open, "rb")
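With the `_strong_etag_match` and `_not_modified` helpers above, any `FileResponse` now evaluates `If-Match`/`If-None-Match` preconditions and can answer a repeat request with a body-less 304. A minimal sketch of code that exercises this path; the file paths are illustrative:

from aiohttp import web


async def readme(request: web.Request) -> web.FileResponse:
    # FileResponse derives a strong ETag from st_mtime_ns and st_size,
    # so a repeat request carrying If-None-Match gets a 304 with no body.
    return web.FileResponse("./README.md")


app = web.Application()
app.router.add_get("/readme", readme)
app.router.add_static("/assets", "./static")  # the same conditional handling applies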
@@ -57,7 +57,7 @@ class AccessLogger(AbstractAccessLogger):
    LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"'
    FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)")
    CLEANUP_RE = re.compile(r"(%[^s])")
    _FORMAT_CACHE = {}  # type: Dict[str, Tuple[str, List[KeyMethod]]]
    _FORMAT_CACHE: Dict[str, Tuple[str, List[KeyMethod]]] = {}

    def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None:
        """Initialise the logger.

@@ -198,10 +198,10 @@ class AccessLogger(AbstractAccessLogger):
                if key.__class__ is str:
                    extra[key] = value
                else:
                    k1, k2 = key  # type: ignore
                    dct = extra.get(k1, {})  # type: ignore
                    dct[k2] = value  # type: ignore
                    extra[k1] = dct  # type: ignore
                    k1, k2 = key  # type: ignore[misc]
                    dct = extra.get(k1, {})  # type: ignore[var-annotated,has-type]
                    dct[k2] = value  # type: ignore[index,has-type]
                    extra[k1] = dct  # type: ignore[has-type,assignment]

            self.logger.info(self._log_format % tuple(values), extra=extra)
        except Exception:
@@ -1,6 +1,7 @@
import re
from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, Type, TypeVar

from .typedefs import Handler
from .web_exceptions import HTTPPermanentRedirect, _HTTPMove
from .web_request import Request
from .web_response import StreamResponse

@@ -21,7 +22,7 @@ async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Re
    alt_request = request.clone(rel_url=path)

    match_info = await request.app.router.resolve(alt_request)
    alt_request._match_info = match_info  # type: ignore
    alt_request._match_info = match_info

    if match_info.http_exception is None:
        return True, alt_request

@@ -30,12 +31,11 @@ async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Re


def middleware(f: _Func) -> _Func:
    f.__middleware_version__ = 1  # type: ignore
    f.__middleware_version__ = 1  # type: ignore[attr-defined]
    return f


_Handler = Callable[[Request], Awaitable[StreamResponse]]
_Middleware = Callable[[Request, _Handler], Awaitable[StreamResponse]]
_Middleware = Callable[[Request, Handler], Awaitable[StreamResponse]]


def normalize_path_middleware(

@@ -43,12 +43,11 @@ def normalize_path_middleware(
    append_slash: bool = True,
    remove_slash: bool = False,
    merge_slashes: bool = True,
    redirect_class: Type[_HTTPMove] = HTTPPermanentRedirect
    redirect_class: Type[_HTTPMove] = HTTPPermanentRedirect,
) -> _Middleware:
    """
    Middleware factory which produces a middleware that normalizes
    the path of a request. By normalizing it means:
    """Factory for producing a middleware that normalizes the path of a request.

    Normalizing means:
        - Add or remove a trailing slash to the path.
        - Double slashes are replaced by one.


@@ -74,12 +73,11 @@ def normalize_path_middleware(
    If merge_slashes is True, merge multiple consecutive slashes in the
    path into one.
    """

    correct_configuration = not (append_slash and remove_slash)
    assert correct_configuration, "Cannot both remove and append slash"

    @middleware
    async def impl(request: Request, handler: _Handler) -> StreamResponse:
    async def impl(request: Request, handler: Handler) -> StreamResponse:
        if isinstance(request.match_info.route, SystemRoute):
            paths_to_check = []
            if "?" in request.raw_path:

@@ -114,7 +112,7 @@ def normalize_path_middleware(

def _fix_request_current_app(app: "Application") -> _Middleware:
    @middleware
    async def impl(request: Request, handler: _Handler) -> StreamResponse:
    async def impl(request: Request, handler: Handler) -> StreamResponse:
        with request.match_info.set_current_app(app):
            return await handler(request)
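Context for the `_Handler` to `Handler` swap: `normalize_path_middleware` is consumed like any other middleware factory. A minimal usage sketch; the route is illustrative:

from aiohttp import web


async def hello(request: web.Request) -> web.Response:
    return web.Response(text="hello")


app = web.Application(
    middlewares=[
        # Redirect /path//to -> /path/to and /path -> /path/ with a 308.
        web.normalize_path_middleware(append_slash=True, merge_slashes=True)
    ]
)
app.router.add_get("/hello/", hello)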
@@ -7,13 +7,26 @@ from contextlib import suppress
from html import escape as html_escape
from http import HTTPStatus
from logging import Logger
from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional, Tuple, Type, cast
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Deque,
    Optional,
    Sequence,
    Tuple,
    Type,
    Union,
    cast,
)

import attr
import yarl

from .abc import AbstractAccessLogger, AbstractStreamWriter
from .base_protocol import BaseProtocol
from .helpers import CeilTimeout, current_task
from .helpers import ceil_timeout
from .http import (
    HttpProcessingError,
    HttpRequestParser,

@@ -48,9 +61,17 @@ _RequestFactory = Callable[

_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]


ERROR = RawRequestMessage(
    "UNKNOWN", "/", HttpVersion10, {}, {}, True, False, False, False, yarl.URL("/")
    "UNKNOWN",
    "/",
    HttpVersion10,
    {},  # type: ignore[arg-type]
    {},  # type: ignore[arg-type]
    True,
    None,
    False,
    False,
    yarl.URL("/"),
)


@@ -62,6 +83,16 @@ class PayloadAccessError(Exception):
    """Payload was accessed after response was sent."""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class _ErrInfo:
    status: int
    exc: BaseException
    message: str


_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]


class RequestHandler(BaseProtocol):
    """HTTP protocol implementation.


@@ -73,32 +104,28 @@ class RequestHandler(BaseProtocol):
    status line, bad headers or incomplete payload. If any error occurs,
    connection gets closed.

    :param keepalive_timeout: number of seconds before closing
                              keep-alive connection
    :type keepalive_timeout: int or None
    keepalive_timeout -- number of seconds before closing
                         keep-alive connection

    :param bool tcp_keepalive: TCP keep-alive is on, default is on
    tcp_keepalive -- TCP keep-alive is on, default is on

    :param bool debug: enable debug mode
    debug -- enable debug mode

    :param logger: custom logger object
    :type logger: aiohttp.log.server_logger
    logger -- custom logger object

    :param access_log_class: custom class for access_logger
    :type access_log_class: aiohttp.abc.AbstractAccessLogger
    access_log_class -- custom class for access_logger

    :param access_log: custom logging object
    :type access_log: aiohttp.log.server_logger
    access_log -- custom logging object

    :param str access_log_format: access log format string
    access_log_format -- access log format string

    :param loop: Optional event loop
    loop -- Optional event loop

    :param int max_line_size: Optional maximum header line size
    max_line_size -- Optional maximum header line size

    :param int max_field_size: Optional maximum header field size
    max_field_size -- Optional maximum header field size

    :param int max_headers: Optional maximum header size
    max_headers -- Optional maximum header size

    """

@@ -118,7 +145,6 @@ class RequestHandler(BaseProtocol):
        "_messages",
        "_message_tail",
        "_waiter",
        "_error_handler",
        "_task_handler",
        "_upgrade",
        "_payload_parser",

@@ -149,39 +175,34 @@ class RequestHandler(BaseProtocol):
        max_headers: int = 32768,
        max_field_size: int = 8190,
        lingering_time: float = 10.0,
        read_bufsize: int = 2 ** 16,
        read_bufsize: int = 2**16,
        auto_decompress: bool = True,
    ):
        super().__init__(loop)

        self._request_count = 0
        self._keepalive = False
        self._current_request = None  # type: Optional[BaseRequest]
        self._manager = manager  # type: Optional[Server]
        self._request_handler = (
            manager.request_handler
        )  # type: Optional[_RequestHandler]
        self._request_factory = (
            manager.request_factory
        )  # type: Optional[_RequestFactory]
        self._current_request: Optional[BaseRequest] = None
        self._manager: Optional[Server] = manager
        self._request_handler: Optional[_RequestHandler] = manager.request_handler
        self._request_factory: Optional[_RequestFactory] = manager.request_factory

        self._tcp_keepalive = tcp_keepalive
        # placeholder to be replaced on keepalive timeout setup
        self._keepalive_time = 0.0
        self._keepalive_handle = None  # type: Optional[asyncio.Handle]
        self._keepalive_handle: Optional[asyncio.Handle] = None
        self._keepalive_timeout = keepalive_timeout
        self._lingering_time = float(lingering_time)

        self._messages = deque()  # type: Any  # Python 3.5 has no typing.Deque
        self._messages: Deque[_MsgType] = deque()
        self._message_tail = b""

        self._waiter = None  # type: Optional[asyncio.Future[None]]
        self._error_handler = None  # type: Optional[asyncio.Task[None]]
        self._task_handler = None  # type: Optional[asyncio.Task[None]]
        self._waiter: Optional[asyncio.Future[None]] = None
        self._task_handler: Optional[asyncio.Task[None]] = None

        self._upgrade = False
        self._payload_parser = None  # type: Any
        self._request_parser = HttpRequestParser(
        self._payload_parser: Any = None
        self._request_parser: Optional[HttpRequestParser] = HttpRequestParser(
            self,
            loop,
            read_bufsize,

@@ -189,15 +210,16 @@ class RequestHandler(BaseProtocol):
            max_field_size=max_field_size,
            max_headers=max_headers,
            payload_exception=RequestPayloadError,
        )  # type: Optional[HttpRequestParser]
            auto_decompress=auto_decompress,
        )

        self.logger = logger
        self.debug = debug
        self.access_log = access_log
        if access_log:
            self.access_logger = access_log_class(
            self.access_logger: Optional[AbstractAccessLogger] = access_log_class(
                access_log, access_log_format
            )  # type: Optional[AbstractAccessLogger]
            )
        else:
            self.access_logger = None


@@ -215,9 +237,11 @@ class RequestHandler(BaseProtocol):
        return self._keepalive_timeout

    async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
        """Worker process is about to exit, we need cleanup everything and
        stop accepting requests. It is especially important for keep-alive
        connections."""
        """Do worker process exit preparations.

        We need to clean up everything and stop accepting requests.
        It is especially important for keep-alive connections.
        """
        self._force_close = True

        if self._keepalive_handle is not None:

@@ -228,10 +252,7 @@ class RequestHandler(BaseProtocol):

        # wait for handlers
        with suppress(asyncio.CancelledError, asyncio.TimeoutError):
            with CeilTimeout(timeout, loop=self._loop):
                if self._error_handler is not None and not self._error_handler.done():
                    await self._error_handler

            async with ceil_timeout(timeout):
                if self._current_request is not None:
                    self._current_request._cancel(asyncio.CancelledError())


@@ -278,10 +299,6 @@ class RequestHandler(BaseProtocol):
            exc = ConnectionResetError("Connection lost")
            self._current_request._cancel(exc)

        if self._error_handler is not None:
            self._error_handler.cancel()
        if self._task_handler is not None:
            self._task_handler.cancel()
        if self._waiter is not None:
            self._waiter.cancel()


@@ -308,40 +325,30 @@ class RequestHandler(BaseProtocol):
        if self._force_close or self._close:
            return
        # parse http messages
        messages: Sequence[_MsgType]
        if self._payload_parser is None and not self._upgrade:
            assert self._request_parser is not None
            try:
                messages, upgraded, tail = self._request_parser.feed_data(data)
            except HttpProcessingError as exc:
                # something happened during parsing
                self._error_handler = self._loop.create_task(
                    self.handle_parse_error(
                        StreamWriter(self, self._loop), 400, exc, exc.message
                    )
                )
                self.close()
            except Exception as exc:
                # 500: internal error
                self._error_handler = self._loop.create_task(
                    self.handle_parse_error(StreamWriter(self, self._loop), 500, exc)
                )
                self.close()
            else:
                if messages:
                    # sometimes the parser returns no messages
                    for (msg, payload) in messages:
                        self._request_count += 1
                        self._messages.append((msg, payload))
                messages = [
                    (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
                ]
                upgraded = False
                tail = b""

                waiter = self._waiter
                if waiter is not None:
                    if not waiter.done():
                        # don't set result twice
                        waiter.set_result(None)
            for msg, payload in messages or ():
                self._request_count += 1
                self._messages.append((msg, payload))

                self._upgrade = upgraded
                if upgraded and tail:
                    self._message_tail = tail
            waiter = self._waiter
            if messages and waiter is not None and not waiter.done():
                # don't set result twice
                waiter.set_result(None)

            self._upgrade = upgraded
            if upgraded and tail:
                self._message_tail = tail

        # no parser, just store
        elif self._payload_parser is None and self._upgrade and data:

@@ -364,14 +371,17 @@ class RequestHandler(BaseProtocol):
            self._keepalive_handle = None

    def close(self) -> None:
        """Stop accepting new pipelinig messages and close
        connection when handlers done processing messages"""
        """Close connection.

        Stop accepting new pipelining messages and close
        connection when handlers done processing messages.
        """
        self._close = True
        if self._waiter:
            self._waiter.cancel()

    def force_close(self) -> None:
        """Force close connection"""
        """Forcefully close connection."""
        self._force_close = True
        if self._waiter:
            self._waiter.cancel()

@@ -414,18 +424,17 @@ class RequestHandler(BaseProtocol):
        self,
        request: BaseRequest,
        start_time: float,
        request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
    ) -> Tuple[StreamResponse, bool]:
        assert self._request_handler is not None
        try:
            try:
                self._current_request = request
                resp = await self._request_handler(request)
                resp = await request_handler(request)
            finally:
                self._current_request = None
        except HTTPException as exc:
            resp = Response(
                status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers
            )
            resp = exc
            reset = await self.finish_response(request, resp, start_time)
        except asyncio.CancelledError:
            raise

@@ -437,6 +446,15 @@ class RequestHandler(BaseProtocol):
            resp = self.handle_error(request, 500, exc)
            reset = await self.finish_response(request, resp, start_time)
        else:
            # Deprecation warning (See #2415)
            if getattr(resp, "__http_exception__", False):
                warnings.warn(
                    "returning HTTPException object is deprecated "
                    "(#2415) and will be removed, "
                    "please raise the exception instead",
                    DeprecationWarning,
                )

            reset = await self.finish_response(request, resp, start_time)

        return resp, reset

@@ -477,23 +495,24 @@ class RequestHandler(BaseProtocol):

            manager.requests_count += 1
            writer = StreamWriter(self, loop)
            if isinstance(message, _ErrInfo):
                # make request_factory work
                request_handler = self._make_error_handler(message)
                message = ERROR
            else:
                request_handler = self._request_handler

            request = self._request_factory(message, payload, self, writer, handler)
            try:
                # a new task is used for copy context vars (#3406)
                task = self._loop.create_task(self._handle_request(request, start))
                task = self._loop.create_task(
                    self._handle_request(request, start, request_handler)
                )
                try:
                    resp, reset = await task
                except (asyncio.CancelledError, ConnectionError):
                    self.log_debug("Ignored premature client disconnection")
                    break
                # Deprecation warning (See #2415)
                if getattr(resp, "__http_exception__", False):
                    warnings.warn(
                        "returning HTTPException object is deprecated "
                        "(#2415) and will be removed, "
                        "please raise the exception instead",
                        DeprecationWarning,
                    )

                # Drop the processed task from asyncio.Task.all_tasks() early
                del task

@@ -517,7 +536,7 @@ class RequestHandler(BaseProtocol):

            with suppress(asyncio.TimeoutError, asyncio.CancelledError):
                while not payload.is_eof() and now < end_t:
                    with CeilTimeout(end_t - now, loop=loop):
                    async with ceil_timeout(end_t - now):
                        # read and ignore
                        await payload.readany()
                        now = loop.time()

@@ -558,14 +577,15 @@ class RequestHandler(BaseProtocol):
        # remove handler, close transport if no handlers left
        if not self._force_close:
            self._task_handler = None
            if self.transport is not None and self._error_handler is None:
            if self.transport is not None:
                self.transport.close()

    async def finish_response(
        self, request: BaseRequest, resp: StreamResponse, start_time: float
    ) -> bool:
        """
        Prepare the response and write_eof, then log access. This has to
        """Prepare the response and write_eof, then log access.

        This has to
        be called within the context of any exception so the access logger
        can get exception information. Returns True if the client disconnects
        prematurely.

@@ -607,9 +627,17 @@ class RequestHandler(BaseProtocol):
        """Handle errors.

        Returns HTTP response with specific status code. Logs additional
        information. It always closes current connection."""
        information. It always closes current connection.
        """
        self.log_exception("Error handling request", exc_info=exc)

        # some data already got sent, connection is broken
        if request.writer.output_size > 0:
            raise ConnectionError(
                "Response is sent already, cannot send another response "
                "with the error message"
            )

        ct = "text/plain"
        if status == HTTPStatus.INTERNAL_SERVER_ERROR:
            title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)

@@ -638,30 +666,14 @@ class RequestHandler(BaseProtocol):
        resp = Response(status=status, text=message, content_type=ct)
        resp.force_close()

        # some data already got sent, connection is broken
        if request.writer.output_size > 0 or self.transport is None:
            self.force_close()

        return resp

    async def handle_parse_error(
        self,
        writer: AbstractStreamWriter,
        status: int,
        exc: Optional[BaseException] = None,
        message: Optional[str] = None,
    ) -> None:
        task = current_task()
        assert task is not None
        request = BaseRequest(
            ERROR, EMPTY_PAYLOAD, self, writer, task, self._loop  # type: ignore
        )
    def _make_error_handler(
        self, err_info: _ErrInfo
    ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
        async def handler(request: BaseRequest) -> StreamResponse:
            return self.handle_error(
                request, err_info.status, err_info.exc, err_info.message
            )

        resp = self.handle_error(request, status, exc, message)
        await resp.prepare(request)
        await resp.write_eof()

        if self.transport is not None:
            self.transport.close()

        self._error_handler = None
        return handler
@ -7,7 +7,6 @@ import string
|
|||
import tempfile
|
||||
import types
|
||||
import warnings
|
||||
from email.utils import parsedate
|
||||
from http.cookies import SimpleCookie
|
||||
from types import MappingProxyType
|
||||
from typing import (
|
||||
|
@ -18,6 +17,7 @@ from typing import (
|
|||
Mapping,
|
||||
MutableMapping,
|
||||
Optional,
|
||||
Pattern,
|
||||
Tuple,
|
||||
Union,
|
||||
cast,
|
||||
|
@ -30,13 +30,24 @@ from yarl import URL
|
|||
|
||||
from . import hdrs
|
||||
from .abc import AbstractStreamWriter
|
||||
from .helpers import DEBUG, ChainMapProxy, HeadersMixin, reify, sentinel
|
||||
from .helpers import (
|
||||
DEBUG,
|
||||
ETAG_ANY,
|
||||
LIST_QUOTED_ETAG_RE,
|
||||
ChainMapProxy,
|
||||
ETag,
|
||||
HeadersMixin,
|
||||
parse_http_date,
|
||||
reify,
|
||||
sentinel,
|
||||
)
|
||||
from .http_parser import RawRequestMessage
|
||||
from .http_writer import HttpVersion
|
||||
from .multipart import BodyPartReader, MultipartReader
|
||||
from .streams import EmptyStreamReader, StreamReader
|
||||
from .typedefs import (
|
||||
DEFAULT_JSON_DECODER,
|
||||
Final,
|
||||
JSONDecoder,
|
||||
LooseHeaders,
|
||||
RawHeaders,
|
||||
|
@ -63,31 +74,33 @@ class FileField:
|
|||
headers: "CIMultiDictProxy[str]"
|
||||
|
||||
|
||||
_TCHAR = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
|
||||
_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
|
||||
# '-' at the end to prevent interpretation as range in a char class
|
||||
|
||||
_TOKEN = fr"[{_TCHAR}]+"
|
||||
_TOKEN: Final[str] = rf"[{_TCHAR}]+"
|
||||
|
||||
_QDTEXT = r"[{}]".format(
|
||||
_QDTEXT: Final[str] = r"[{}]".format(
|
||||
r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))
|
||||
)
|
||||
# qdtext includes 0x5C to escape 0x5D ('\]')
|
||||
# qdtext excludes obs-text (because obsoleted, and encoding not specified)
|
||||
|
||||
_QUOTED_PAIR = r"\\[\t !-~]"
|
||||
_QUOTED_PAIR: Final[str] = r"\\[\t !-~]"
|
||||
|
||||
_QUOTED_STRING = r'"(?:{quoted_pair}|{qdtext})*"'.format(
|
||||
_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format(
|
||||
qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR
|
||||
)
|
||||
|
||||
_FORWARDED_PAIR = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format(
|
||||
_FORWARDED_PAIR: Final[
|
||||
str
|
||||
] = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format(
|
||||
token=_TOKEN, quoted_string=_QUOTED_STRING
|
||||
)
|
||||
|
||||
_QUOTED_PAIR_REPLACE_RE = re.compile(r"\\([\t !-~])")
|
||||
_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])")
|
||||
# same pattern as _QUOTED_PAIR but contains a capture group
|
||||
|
||||
_FORWARDED_PAIR_RE = re.compile(_FORWARDED_PAIR)
|
||||
_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR)
|
||||
|
||||
############################################################
|
||||
# HTTP Request
|
||||
|
@@ -135,7 +148,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
task: "asyncio.Task[None]",
loop: asyncio.AbstractEventLoop,
*,
client_max_size: int = 1024 ** 2,
client_max_size: int = 1024**2,
state: Optional[Dict[str, Any]] = None,
scheme: Optional[str] = None,
host: Optional[str] = None,

@@ -151,14 +164,22 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
self._headers = message.headers
self._method = message.method
self._version = message.version
self._rel_url = message.url
self._post = (
None
) # type: Optional[MultiDictProxy[Union[str, bytes, FileField]]]
self._read_bytes = None # type: Optional[bytes]
self._cache: Dict[str, Any] = {}
url = message.url
if url.is_absolute():
# absolute URL is given,
# override auto-calculating url, host, and scheme
# all other properties should be good
self._cache["url"] = url
self._cache["host"] = url.host
self._cache["scheme"] = url.scheme
self._rel_url = url.relative()
else:
self._rel_url = message.url
self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None
self._read_bytes: Optional[bytes] = None

self._state = state
self._cache = {} # type: Dict[str, Any]
self._task = task
self._client_max_size = client_max_size
self._loop = loop

@@ -190,13 +211,11 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
Creates and returns a new instance of Request object. If no parameters
are given, an exact copy is returned. If a parameter is not passed, it
will reuse the one from the current request object.

"""

if self._read_bytes:
raise RuntimeError("Cannot clone request " "after reading its content")

dct = {} # type: Dict[str, Any]
dct: Dict[str, Any] = {}
if method is not sentinel:
dct["method"] = method
if rel_url is not sentinel:
@@ -315,7 +334,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
length = len(field_value)
pos = 0
need_separator = False
elem = {} # type: Dict[str, str]
elem: Dict[str, str] = {}
elems.append(types.MappingProxyType(elem))
while 0 <= pos < length:
match = _FORWARDED_PAIR_RE.match(field_value, pos)

@@ -396,8 +415,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
host = self._message.headers.get(hdrs.HOST)
if host is not None:
return host
else:
return socket.getfqdn()
return socket.getfqdn()

@reify
def remote(self) -> Optional[str]:

@@ -408,10 +426,11 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
- overridden value by .clone(remote=new_remote) call.
- peername of opened socket
"""
if self._transport_peername is None:
return None
if isinstance(self._transport_peername, (list, tuple)):
return self._transport_peername[0]
else:
return self._transport_peername
return str(self._transport_peername[0])
return str(self._transport_peername)

@reify
def url(self) -> URL:

@@ -437,6 +456,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
@reify
def raw_path(self) -> str:
"""The URL including raw *PATH INFO* without the host or scheme.

Warning, the path is unquoted and may contains non valid URL characters

E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters``

@@ -446,7 +466,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
@reify
def query(self) -> "MultiDictProxy[str]":
"""A multidict with all the variables in the query string."""
return self._rel_url.query
return MultiDictProxy(self._rel_url.query)

@reify
def query_string(self) -> str:
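
Note: the query change above makes the property return a MultiDictProxy built from the relative URL's query mapping. A minimal usage sketch, with route and parameter names as assumptions:

    from aiohttp import web

    async def search(request: web.Request) -> web.Response:
        # /search?tag=a&tag=b -> ["a", "b"]; getall preserves duplicates
        tags = request.query.getall("tag", [])
        return web.Response(text=",".join(tags))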
@@ -466,22 +486,13 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
"""A sequence of pairs for all headers."""
return self._message.raw_headers

@staticmethod
def _http_date(_date_str: Optional[str]) -> Optional[datetime.datetime]:
"""Process a date string, return a datetime object"""
if _date_str is not None:
timetuple = parsedate(_date_str)
if timetuple is not None:
return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
return None

@reify
def if_modified_since(self) -> Optional[datetime.datetime]:
"""The value of If-Modified-Since HTTP header, or None.

This header is represented as a `datetime` object.
"""
return self._http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))
return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))

@reify
def if_unmodified_since(self) -> Optional[datetime.datetime]:
@@ -489,7 +500,53 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):

This header is represented as a `datetime` object.
"""
return self._http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))
return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))

@staticmethod
def _etag_values(etag_header: str) -> Iterator[ETag]:
"""Extract `ETag` objects from raw header."""
if etag_header == ETAG_ANY:
yield ETag(
is_weak=False,
value=ETAG_ANY,
)
else:
for match in LIST_QUOTED_ETAG_RE.finditer(etag_header):
is_weak, value, garbage = match.group(2, 3, 4)
# Any symbol captured by 4th group means
# that the following sequence is invalid.
if garbage:
break

yield ETag(
is_weak=bool(is_weak),
value=value,
)

@classmethod
def _if_match_or_none_impl(
cls, header_value: Optional[str]
) -> Optional[Tuple[ETag, ...]]:
if not header_value:
return None

return tuple(cls._etag_values(header_value))

@reify
def if_match(self) -> Optional[Tuple[ETag, ...]]:
"""The value of If-Match HTTP header, or None.

This header is represented as a `tuple` of `ETag` objects.
"""
return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH))

@reify
def if_none_match(self) -> Optional[Tuple[ETag, ...]]:
"""The value of If-None-Match HTTP header, or None.

This header is represented as a `tuple` of `ETag` objects.
"""
return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH))

@reify
def if_range(self) -> Optional[datetime.datetime]:

@@ -497,7 +554,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):

This header is represented as a `datetime` object.
"""
return self._http_date(self.headers.get(hdrs.IF_RANGE))
return parse_http_date(self.headers.get(hdrs.IF_RANGE))

@reify
def keep_alive(self) -> bool:
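
Note: the new _etag_values/_if_match_or_none_impl helpers above surface If-Match/If-None-Match as tuples of ETag objects. A hedged sketch of a conditional GET built on them, assuming a made-up current tag "v1":

    from aiohttp import web

    async def cached(request: web.Request) -> web.Response:
        for etag in request.if_none_match or ():
            if etag.value in ("v1", "*"):
                return web.Response(status=304)  # client copy still valid
        return web.Response(text="payload")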
@@ -511,7 +568,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
A read-only dictionary-like object.
"""
raw = self.headers.get(hdrs.COOKIE, "")
parsed = SimpleCookie(raw) # type: SimpleCookie[str]
parsed: SimpleCookie[str] = SimpleCookie(raw)
return MappingProxyType({key: val.value for key, val in parsed.items()})

@reify

@@ -634,7 +691,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
self._post = MultiDictProxy(MultiDict())
return self._post

out = MultiDict() # type: MultiDict[Union[str, bytes, FileField]]
out: MultiDict[Union[str, bytes, FileField]] = MultiDict()

if content_type == "multipart/form-data":
multipart = await self.multipart()

@@ -655,16 +712,17 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
if field.filename:
# store file in temp file
tmp = tempfile.TemporaryFile()
chunk = await field.read_chunk(size=2 ** 16)
chunk = await field.read_chunk(size=2**16)
while chunk:
chunk = field.decode(chunk)
tmp.write(chunk)
size += len(chunk)
if 0 < max_size < size:
tmp.close()
raise HTTPRequestEntityTooLarge(
max_size=max_size, actual_size=size
)
chunk = await field.read_chunk(size=2 ** 16)
chunk = await field.read_chunk(size=2**16)
tmp.seek(0)

if field_ct is None:

@@ -756,7 +814,7 @@ class Request(BaseRequest):
# or information about traversal lookup

# initialized after route resolving
self._match_info = None # type: Optional[UrlMappingMatchInfo]
self._match_info: Optional[UrlMappingMatchInfo] = None

if DEBUG:

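Note: the read_chunk(size=2**16) loop above spools uploaded files into a temporary file while enforcing client_max_size. A hedged sketch of consuming the parsed form via request.post() (the "file" field name is invented):

    from aiohttp import web

    async def upload(request: web.Request) -> web.Response:
        data = await request.post()  # parses multipart/form-data bodies
        field = data["file"]
        if isinstance(field, web.FileField):
            body = field.file.read()
            return web.Response(text=f"{field.filename}: {len(body)} bytes")
        return web.Response(text=str(field))
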
@@ -8,7 +8,6 @@ import time
import warnings
import zlib
from concurrent.futures import Executor
from email.utils import parsedate
from http.cookies import Morsel, SimpleCookie
from typing import (
TYPE_CHECKING,

@@ -27,7 +26,17 @@ from multidict import CIMultiDict, istr

from . import hdrs, payload
from .abc import AbstractStreamWriter
from .helpers import PY_38, HeadersMixin, rfc822_formatted_time, sentinel
from .helpers import (
ETAG_ANY,
PY_38,
QUOTED_ETAG_RE,
ETag,
HeadersMixin,
parse_http_date,
rfc822_formatted_time,
sentinel,
validate_etag_value,
)
from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11
from .payload import Payload
from .typedefs import JSONEncoder, LooseHeaders
@@ -46,7 +55,7 @@ else:
if not PY_38:
# allow samesite to be used in python < 3.8
# already permitted in python 3.8, see https://bugs.python.org/issue29613
Morsel._reserved["samesite"] = "SameSite" # type: ignore
Morsel._reserved["samesite"] = "SameSite" # type: ignore[attr-defined]


class ContentCoding(enum.Enum):

@@ -76,20 +85,20 @@ class StreamResponse(BaseClass, HeadersMixin):
headers: Optional[LooseHeaders] = None,
) -> None:
self._body = None
self._keep_alive = None # type: Optional[bool]
self._keep_alive: Optional[bool] = None
self._chunked = False
self._compression = False
self._compression_force = None # type: Optional[ContentCoding]
self._cookies = SimpleCookie() # type: SimpleCookie[str]
self._compression_force: Optional[ContentCoding] = None
self._cookies: SimpleCookie[str] = SimpleCookie()

self._req = None # type: Optional[BaseRequest]
self._payload_writer = None # type: Optional[AbstractStreamWriter]
self._req: Optional[BaseRequest] = None
self._payload_writer: Optional[AbstractStreamWriter] = None
self._eof_sent = False
self._body_length = 0
self._state = {} # type: Dict[str, Any]
self._state: Dict[str, Any] = {}

if headers is not None:
self._headers = CIMultiDict(headers) # type: CIMultiDict[str]
self._headers: CIMultiDict[str] = CIMultiDict(headers)
else:
self._headers = CIMultiDict()

@@ -100,8 +109,11 @@ class StreamResponse(BaseClass, HeadersMixin):
return self._payload_writer is not None

@property
def task(self) -> "asyncio.Task[None]":
return getattr(self._req, "task", None)
def task(self) -> "Optional[asyncio.Task[None]]":
if self._req:
return self._req.task
else:
return None

@property
def status(self) -> int:

@@ -209,7 +221,6 @@ class StreamResponse(BaseClass, HeadersMixin):
Sets new cookie or updates existent with new value.
Also updates only those params which are not None.
"""

old = self._cookies.get(name)
if old is not None and old.coded_value == "":
# deleted cookie
@@ -314,12 +325,7 @@ class StreamResponse(BaseClass, HeadersMixin):

This header is represented as a `datetime` object.
"""
httpdate = self._headers.get(hdrs.LAST_MODIFIED)
if httpdate is not None:
timetuple = parsedate(httpdate)
if timetuple is not None:
return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
return None
return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))

@last_modified.setter
def last_modified(

@@ -338,6 +344,43 @@ class StreamResponse(BaseClass, HeadersMixin):
elif isinstance(value, str):
self._headers[hdrs.LAST_MODIFIED] = value

@property
def etag(self) -> Optional[ETag]:
quoted_value = self._headers.get(hdrs.ETAG)
if not quoted_value:
return None
elif quoted_value == ETAG_ANY:
return ETag(value=ETAG_ANY)
match = QUOTED_ETAG_RE.fullmatch(quoted_value)
if not match:
return None
is_weak, value = match.group(1, 2)
return ETag(
is_weak=bool(is_weak),
value=value,
)

@etag.setter
def etag(self, value: Optional[Union[ETag, str]]) -> None:
if value is None:
self._headers.pop(hdrs.ETAG, None)
elif (isinstance(value, str) and value == ETAG_ANY) or (
isinstance(value, ETag) and value.value == ETAG_ANY
):
self._headers[hdrs.ETAG] = ETAG_ANY
elif isinstance(value, str):
validate_etag_value(value)
self._headers[hdrs.ETAG] = f'"{value}"'
elif isinstance(value, ETag) and isinstance(value.value, str):
validate_etag_value(value.value)
hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
self._headers[hdrs.ETAG] = hdr_value
else:
raise ValueError(
f"Unsupported etag type: {type(value)}. "
f"etag must be str, ETag or None"
)

def _generate_content_type_header(
self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
) -> None:
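
Note: the etag property/setter added above accepts str, ETag, or None and writes the quoted header form. A short usage sketch against the vendored aiohttp:

    from aiohttp import web
    from aiohttp.helpers import ETag

    resp = web.Response(text="payload")
    resp.etag = ETag(is_weak=True, value="v1")  # emits ETag: W/"v1"
    resp.etag = "v2"                            # plain str is quoted: ETag: "v2"
    resp.etag = None                            # drops the header again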
@@ -420,7 +463,7 @@ class StreamResponse(BaseClass, HeadersMixin):
elif self._length_check:
writer.length = self.content_length
if writer.length is None:
if version >= HttpVersion11:
if version >= HttpVersion11 and self.status != 204:
writer.enable_chunking()
headers[hdrs.TRANSFER_ENCODING] = "chunked"
if hdrs.CONTENT_LENGTH in headers:

@@ -432,7 +475,8 @@ class StreamResponse(BaseClass, HeadersMixin):
elif version >= HttpVersion11 and self.status in (100, 101, 102, 103, 204):
del headers[hdrs.CONTENT_LENGTH]

headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
if self.status not in (204, 304):
headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
headers.setdefault(hdrs.DATE, rfc822_formatted_time())
headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)

@@ -545,7 +589,7 @@ class Response(StreamResponse):
raise ValueError("body and text are not allowed together")

if headers is None:
real_headers = CIMultiDict() # type: CIMultiDict[str]
real_headers: CIMultiDict[str] = CIMultiDict()
elif not isinstance(headers, CIMultiDict):
real_headers = CIMultiDict(headers)
else:

@@ -594,7 +638,7 @@ class Response(StreamResponse):
else:
self.body = body

self._compressed_body = None # type: Optional[bytes]
self._compressed_body: Optional[bytes] = None
self._zlib_executor_size = zlib_executor_size
self._zlib_executor = zlib_executor

@@ -610,8 +654,8 @@ class Response(StreamResponse):
CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
) -> None:
if body is None:
self._body = None # type: Optional[bytes]
self._body_payload = False # type: bool
self._body: Optional[bytes] = None
self._body_payload: bool = False
elif isinstance(body, (bytes, bytearray)):
self._body = body
self._body_payload = False

@@ -691,7 +735,7 @@ class Response(StreamResponse):
if self._eof_sent:
return
if self._compressed_body is None:
body = self._body # type: Optional[Union[bytes, Payload]]
body: Optional[Union[bytes, Payload]] = self._body
else:
body = self._compressed_body
assert not data, f"data arg is not supported, got {data!r}"

@@ -3,7 +3,6 @@ import os # noqa
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Dict,
Iterator,

@@ -19,7 +18,7 @@ import attr

from . import hdrs
from .abc import AbstractView
from .typedefs import PathLike
from .typedefs import Handler, PathLike

if TYPE_CHECKING: # pragma: no cover
from .web_request import Request

@@ -53,8 +52,7 @@ class AbstractRouteDef(abc.ABC):
pass # pragma: no cover


_SimpleHandler = Callable[[Request], Awaitable[StreamResponse]]
_HandlerType = Union[Type[AbstractView], _SimpleHandler]
_HandlerType = Union[Type[AbstractView], Handler]


@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)

@@ -158,10 +156,10 @@ class RouteTableDef(Sequence[AbstractRouteDef]):
"""Route definition table"""

def __init__(self) -> None:
self._items = [] # type: List[AbstractRouteDef]
self._items: List[AbstractRouteDef] = []

def __repr__(self) -> str:
return "<RouteTableDef count={}>".format(len(self._items))
return f"<RouteTableDef count={len(self._items)}>"

@overload
def __getitem__(self, index: int) -> AbstractRouteDef:

@@ -171,7 +169,7 @@ class RouteTableDef(Sequence[AbstractRouteDef]):
def __getitem__(self, index: slice) -> List[AbstractRouteDef]:
...

def __getitem__(self, index): # type: ignore
def __getitem__(self, index): # type: ignore[no-untyped-def]
return self._items[index]

def __iter__(self) -> Iterator[AbstractRouteDef]:

@@ -208,6 +206,9 @@ class RouteTableDef(Sequence[AbstractRouteDef]):
def delete(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_DELETE, path, **kwargs)

def options(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_OPTIONS, path, **kwargs)

def view(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_ANY, path, **kwargs)

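Note: RouteTableDef gains options() and view() shortcuts in the hunk above. A sketch of both decorators in use (paths and handlers are invented):

    from aiohttp import web

    routes = web.RouteTableDef()

    @routes.options("/ping")
    async def ping_options(request: web.Request) -> web.Response:
        return web.Response(headers={"Allow": "GET,OPTIONS"})

    @routes.view("/items")  # dispatches every method to the view class
    class ItemsView(web.View):
        async def get(self) -> web.Response:
            return web.Response(text="items")

    app = web.Application()
    app.add_routes(routes)
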
@@ -12,7 +12,7 @@ from .web_server import Server
try:
from ssl import SSLContext
except ImportError:
SSLContext = object # type: ignore
SSLContext = object # type: ignore[misc,assignment]


__all__ = (

@@ -53,7 +53,7 @@ class BaseSite(ABC):
self._shutdown_timeout = shutdown_timeout
self._ssl_context = ssl_context
self._backlog = backlog
self._server = None # type: Optional[asyncio.AbstractServer]
self._server: Optional[asyncio.AbstractServer] = None

@property
@abstractmethod

@@ -171,7 +171,9 @@ class NamedPipeSite(BaseSite):
self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0
) -> None:
loop = asyncio.get_event_loop()
if not isinstance(loop, asyncio.ProactorEventLoop): # type: ignore
if not isinstance(
loop, asyncio.ProactorEventLoop # type: ignore[attr-defined]
):
raise RuntimeError(
"Named Pipes only available in proactor" "loop under windows"
)

@@ -187,7 +189,9 @@ class NamedPipeSite(BaseSite):
loop = asyncio.get_event_loop()
server = self._runner.server
assert server is not None
_server = await loop.start_serving_pipe(server, self._path) # type: ignore
_server = await loop.start_serving_pipe( # type: ignore[attr-defined]
server, self._path
)
self._server = _server[0]


@@ -238,8 +242,8 @@ class BaseRunner(ABC):
def __init__(self, *, handle_signals: bool = False, **kwargs: Any) -> None:
self._handle_signals = handle_signals
self._kwargs = kwargs
self._server = None # type: Optional[Server]
self._sites = [] # type: List[BaseSite]
self._server: Optional[Server] = None
self._sites: List[BaseSite] = []

@property
def server(self) -> Optional[Server]:

@@ -247,7 +251,7 @@ class BaseRunner(ABC):

@property
def addresses(self) -> List[Any]:
ret = [] # type: List[Any]
ret: List[Any] = []
for site in self._sites:
server = site._server
if server is not None:

@@ -281,10 +285,6 @@ class BaseRunner(ABC):
async def cleanup(self) -> None:
loop = asyncio.get_event_loop()

if self._server is None:
# no started yet, do nothing
return

# The loop over sites is intentional, an exception on gather()
# leaves self._sites in unpredictable state.
# The loop guaranties that a site is either deleted on success or

@@ -22,7 +22,7 @@ class Server:
**kwargs: Any
) -> None:
self._loop = get_running_loop(loop)
self._connections = {} # type: Dict[RequestHandler, asyncio.Transport]
self._connections: Dict[RequestHandler, asyncio.Transport] = {}
self._kwargs = kwargs
self.requests_count = 0
self.request_handler = handler

@@ -33,14 +33,13 @@ from typing import (
cast,
)

from typing_extensions import TypedDict
from yarl import URL, __version__ as yarl_version # type: ignore
from yarl import URL, __version__ as yarl_version # type: ignore[attr-defined]

from . import hdrs
from .abc import AbstractMatchInfo, AbstractRouter, AbstractView
from .helpers import DEBUG
from .http import HttpVersion11
from .typedefs import PathLike
from .typedefs import Final, Handler, PathLike, TypedDict
from .web_exceptions import (
HTTPException,
HTTPExpectationFailed,

@@ -74,16 +73,19 @@ if TYPE_CHECKING: # pragma: no cover
else:
BaseDict = dict

YARL_VERSION = tuple(map(int, yarl_version.split(".")[:2]))
YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2]))

HTTP_METHOD_RE = re.compile(r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$")
ROUTE_RE = re.compile(r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})")
PATH_SEP = re.escape("/")
HTTP_METHOD_RE: Final[Pattern[str]] = re.compile(
r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$"
)
ROUTE_RE: Final[Pattern[str]] = re.compile(
r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})"
)
PATH_SEP: Final[str] = re.escape("/")


_WebHandler = Callable[[Request], Awaitable[StreamResponse]]
_ExpectHandler = Callable[[Request], Awaitable[None]]
_Resolve = Tuple[Optional[AbstractMatchInfo], Set[str]]
_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]]


class _InfoDict(TypedDict, total=False):
@@ -128,16 +130,16 @@ class AbstractResource(Sized, Iterable["AbstractRoute"]):

@abc.abstractmethod # pragma: no branch
async def resolve(self, request: Request) -> _Resolve:
"""Resolve resource
"""Resolve resource.

Return (UrlMappingMatchInfo, allowed_methods) pair."""
Return (UrlMappingMatchInfo, allowed_methods) pair.
"""

@abc.abstractmethod
def add_prefix(self, prefix: str) -> None:
"""Add a prefix to processed URLs.

Required for subapplications support.

"""

@abc.abstractmethod

@@ -156,7 +158,7 @@ class AbstractRoute(abc.ABC):
def __init__(
self,
method: str,
handler: Union[_WebHandler, Type[AbstractView]],
handler: Union[Handler, Type[AbstractView]],
*,
expect_handler: Optional[_ExpectHandler] = None,
resource: Optional[AbstractResource] = None,

@@ -193,7 +195,7 @@ class AbstractRoute(abc.ABC):
result = old_handler(request)
if asyncio.iscoroutine(result):
return await result
return result # type: ignore
return result # type: ignore[return-value]

old_handler = handler
handler = handler_wrapper

@@ -208,7 +210,7 @@ class AbstractRoute(abc.ABC):
return self._method

@property
def handler(self) -> _WebHandler:
def handler(self) -> Handler:
return self._handler

@property

@@ -236,12 +238,12 @@ class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo):
def __init__(self, match_dict: Dict[str, str], route: AbstractRoute):
super().__init__(match_dict)
self._route = route
self._apps = [] # type: List[Application]
self._current_app = None # type: Optional[Application]
self._apps: List[Application] = []
self._current_app: Optional[Application] = None
self._frozen = False

@property
def handler(self) -> _WebHandler:
def handler(self) -> Handler:
return self._route.handler

@property

@@ -256,7 +258,7 @@ class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo):
def http_exception(self) -> Optional[HTTPException]:
return None

def get_info(self) -> _InfoDict: # type: ignore
def get_info(self) -> _InfoDict: # type: ignore[override]
return self._route.get_info()

@property
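
Note: Handler here is the Callable[[Request], Awaitable[StreamResponse]] alias this diff moves into typedefs, replacing the module-local _WebHandler. A hedged sketch of annotating middleware against it (middleware name and header are invented):

    from aiohttp import web
    from aiohttp.typedefs import Handler

    @web.middleware
    async def timing(request: web.Request, handler: Handler) -> web.StreamResponse:
        resp = await handler(request)  # handler is any route callable
        resp.headers["X-Handled-By"] = "timing-middleware"
        return resp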
@@ -331,12 +333,12 @@ async def _default_expect_handler(request: Request) -> None:
class Resource(AbstractResource):
def __init__(self, *, name: Optional[str] = None) -> None:
super().__init__(name=name)
self._routes = [] # type: List[ResourceRoute]
self._routes: List[ResourceRoute] = []

def add_route(
self,
method: str,
handler: Union[Type[AbstractView], _WebHandler],
handler: Union[Type[AbstractView], Handler],
*,
expect_handler: Optional[_ExpectHandler] = None,
) -> "ResourceRoute":

@@ -360,7 +362,7 @@ class Resource(AbstractResource):
self._routes.append(route)

async def resolve(self, request: Request) -> _Resolve:
allowed_methods = set() # type: Set[str]
allowed_methods: Set[str] = set()

match_dict = self._match(request.rel_url.raw_path)
if match_dict is None:

@@ -421,7 +423,7 @@ class PlainResource(Resource):
def get_info(self) -> _InfoDict:
return {"path": self._path}

def url_for(self) -> URL: # type: ignore
def url_for(self) -> URL: # type: ignore[override]
return URL.build(path=self._path, encoded=True)

def __repr__(self) -> str:

@@ -511,6 +513,7 @@ class PrefixResource(AbstractResource):
assert prefix in ("", "/") or not prefix.endswith("/"), prefix
super().__init__(name=name)
self._prefix = _requote_path(prefix)
self._prefix2 = self._prefix + "/"

@property
def canonical(self) -> str:

@@ -521,6 +524,7 @@ class PrefixResource(AbstractResource):
assert not prefix.endswith("/")
assert len(prefix) > 1
self._prefix = prefix + self._prefix
self._prefix2 = self._prefix + "/"

def raw_match(self, prefix: str) -> bool:
return False

@@ -569,7 +573,7 @@ class StaticResource(PrefixResource):
),
}

def url_for( # type: ignore
def url_for( # type: ignore[override]
self,
*,
filename: Union[str, Path],

@@ -621,7 +625,7 @@ class StaticResource(PrefixResource):
"routes": self._routes,
}

def set_options_route(self, handler: _WebHandler) -> None:
def set_options_route(self, handler: Handler) -> None:
if "OPTIONS" in self._routes:
raise RuntimeError("OPTIONS route was set already")
self._routes["OPTIONS"] = ResourceRoute(

@@ -632,7 +636,7 @@ class StaticResource(PrefixResource):
path = request.rel_url.raw_path
method = request.method
allowed_methods = set(self._routes)
if not path.startswith(self._prefix):
if not path.startswith(self._prefix2) and path != self._prefix:
return None, set()

if method not in allowed_methods:

@@ -748,7 +752,7 @@ class PrefixedSubAppResource(PrefixResource):

async def resolve(self, request: Request) -> _Resolve:
if (
not request.url.raw_path.startswith(self._prefix + "/")
not request.url.raw_path.startswith(self._prefix2)
and request.url.raw_path != self._prefix
):
return None, set()

@@ -878,7 +882,7 @@ class ResourceRoute(AbstractRoute):
def __init__(
self,
method: str,
handler: Union[_WebHandler, Type[AbstractView]],
handler: Union[Handler, Type[AbstractView]],
resource: AbstractResource,
*,
expect_handler: Optional[_ExpectHandler] = None,
@@ -942,7 +946,9 @@ class View(AbstractView):
async def _iter(self) -> StreamResponse:
if self.request.method not in hdrs.METH_ALL:
self._raise_allowed_methods()
method = getattr(self, self.request.method.lower(), None)
method: Callable[[], Awaitable[StreamResponse]] = getattr(
self, self.request.method.lower(), None
)
if method is None:
self._raise_allowed_methods()
resp = await method()

@@ -972,7 +978,7 @@ class ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResourc

class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]):
def __init__(self, resources: List[AbstractResource]):
self._routes = [] # type: List[AbstractRoute]
self._routes: List[AbstractRoute] = []
for resource in resources:
for route in resource:
self._routes.append(route)

@@ -993,12 +999,12 @@ class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):

def __init__(self) -> None:
super().__init__()
self._resources = [] # type: List[AbstractResource]
self._named_resources = {} # type: Dict[str, AbstractResource]
self._resources: List[AbstractResource] = []
self._named_resources: Dict[str, AbstractResource] = {}

async def resolve(self, request: Request) -> AbstractMatchInfo:
async def resolve(self, request: Request) -> UrlMappingMatchInfo:
method = request.method
allowed_methods = set() # type: Set[str]
allowed_methods: Set[str] = set()

for resource in self._resources:
match_dict, allowed = await resource.resolve(request)

@@ -1006,11 +1012,11 @@ class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
return match_dict
else:
allowed_methods |= allowed

if allowed_methods:
return MatchInfoError(HTTPMethodNotAllowed(method, allowed_methods))
else:
if allowed_methods:
return MatchInfoError(HTTPMethodNotAllowed(method, allowed_methods))
else:
return MatchInfoError(HTTPNotFound())
return MatchInfoError(HTTPNotFound())

def __iter__(self) -> Iterator[str]:
return iter(self._named_resources)

@@ -1086,7 +1092,7 @@ class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
self,
method: str,
path: str,
handler: Union[_WebHandler, Type[AbstractView]],
handler: Union[Handler, Type[AbstractView]],
*,
name: Optional[str] = None,
expect_handler: Optional[_ExpectHandler] = None,
@@ -1128,72 +1134,53 @@ class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
self.register_resource(resource)
return resource

def add_head(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute:
"""
Shortcut for add_route with method HEAD
"""
def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method HEAD."""
return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs)

def add_options(
self, path: str, handler: _WebHandler, **kwargs: Any
) -> AbstractRoute:
"""
Shortcut for add_route with method OPTIONS
"""
def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method OPTIONS."""
return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)

def add_get(
self,
path: str,
handler: _WebHandler,
handler: Handler,
*,
name: Optional[str] = None,
allow_head: bool = True,
**kwargs: Any,
) -> AbstractRoute:
"""
Shortcut for add_route with method GET, if allow_head is true another
route is added allowing head requests to the same endpoint
"""Shortcut for add_route with method GET.

If allow_head is true, another
route is added allowing head requests to the same endpoint.
"""
resource = self.add_resource(path, name=name)
if allow_head:
resource.add_route(hdrs.METH_HEAD, handler, **kwargs)
return resource.add_route(hdrs.METH_GET, handler, **kwargs)

def add_post(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute:
"""
Shortcut for add_route with method POST
"""
def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method POST."""
return self.add_route(hdrs.METH_POST, path, handler, **kwargs)

def add_put(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute:
"""
Shortcut for add_route with method PUT
"""
def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method PUT."""
return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)

def add_patch(
self, path: str, handler: _WebHandler, **kwargs: Any
) -> AbstractRoute:
"""
Shortcut for add_route with method PATCH
"""
def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method PATCH."""
return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs)

def add_delete(
self, path: str, handler: _WebHandler, **kwargs: Any
) -> AbstractRoute:
"""
Shortcut for add_route with method DELETE
"""
def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method DELETE."""
return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs)

def add_view(
self, path: str, handler: Type[AbstractView], **kwargs: Any
) -> AbstractRoute:
"""
Shortcut for add_route with ANY methods for a class-based view
"""
"""Shortcut for add_route with ANY methods for a class-based view."""
return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)

def freeze(self) -> None:

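Note: the reworked shortcuts above keep their behaviour; add_get still registers a HEAD route first when allow_head is true. A minimal sketch:

    from aiohttp import web

    async def hello(request: web.Request) -> web.Response:
        return web.Response(text="hello")

    app = web.Application()
    app.router.add_get("/hello", hello)                   # GET plus implicit HEAD
    app.router.add_get("/bare", hello, allow_head=False)  # GET only
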
@@ -3,7 +3,7 @@ import base64
import binascii
import hashlib
import json
from typing import Any, Iterable, Optional, Tuple
from typing import Any, Iterable, Optional, Tuple, cast

import async_timeout
import attr

@@ -19,6 +19,7 @@ from .http import (
WebSocketError,
WebSocketReader,
WebSocketWriter,
WSCloseCode,
WSMessage,
WSMsgType as WSMsgType,
ws_ext_gen,

@@ -26,7 +27,7 @@ from .http import (
)
from .log import ws_logger
from .streams import EofStream, FlowControlDataQueue
from .typedefs import JSONDecoder, JSONEncoder
from .typedefs import Final, JSONDecoder, JSONEncoder
from .web_exceptions import HTTPBadRequest, HTTPException
from .web_request import BaseRequest
from .web_response import StreamResponse

@@ -37,7 +38,7 @@ __all__ = (
"WSMsgType",
)

THRESHOLD_CONNLOST_ACCESS = 5
THRESHOLD_CONNLOST_ACCESS: Final[int] = 5


@attr.s(auto_attribs=True, frozen=True, slots=True)
@@ -67,25 +68,25 @@ class WebSocketResponse(StreamResponse):
) -> None:
super().__init__(status=101)
self._protocols = protocols
self._ws_protocol = None # type: Optional[str]
self._writer = None # type: Optional[WebSocketWriter]
self._reader = None # type: Optional[FlowControlDataQueue[WSMessage]]
self._ws_protocol: Optional[str] = None
self._writer: Optional[WebSocketWriter] = None
self._reader: Optional[FlowControlDataQueue[WSMessage]] = None
self._closed = False
self._closing = False
self._conn_lost = 0
self._close_code = None # type: Optional[int]
self._loop = None # type: Optional[asyncio.AbstractEventLoop]
self._waiting = None # type: Optional[asyncio.Future[bool]]
self._exception = None # type: Optional[BaseException]
self._close_code: Optional[int] = None
self._loop: Optional[asyncio.AbstractEventLoop] = None
self._waiting: Optional[asyncio.Future[bool]] = None
self._exception: Optional[BaseException] = None
self._timeout = timeout
self._receive_timeout = receive_timeout
self._autoclose = autoclose
self._autoping = autoping
self._heartbeat = heartbeat
self._heartbeat_cb = None
self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
if heartbeat is not None:
self._pong_heartbeat = heartbeat / 2.0
self._pong_response_cb = None
self._pong_response_cb: Optional[asyncio.TimerHandle] = None
self._compress = compress
self._max_msg_size = max_msg_size

@@ -102,16 +103,18 @@ class WebSocketResponse(StreamResponse):
self._cancel_heartbeat()

if self._heartbeat is not None:
assert self._loop is not None
self._heartbeat_cb = call_later(
self._send_heartbeat, self._heartbeat, self._loop
)

def _send_heartbeat(self) -> None:
if self._heartbeat is not None and not self._closed:
assert self._loop is not None
# fire-and-forget a task is not perfect but maybe ok for
# sending ping. Otherwise we need a long-living heartbeat
# task in the class.
self._loop.create_task(self._writer.ping()) # type: ignore
self._loop.create_task(self._writer.ping()) # type: ignore[union-attr]

if self._pong_response_cb is not None:
self._pong_response_cb.cancel()

@@ -122,7 +125,7 @@ class WebSocketResponse(StreamResponse):
def _pong_not_received(self) -> None:
if self._req is not None and self._req.transport is not None:
self._closed = True
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = asyncio.TimeoutError()
self._req.transport.close()

@@ -193,9 +196,9 @@ class WebSocketResponse(StreamResponse):
accept_val = base64.b64encode(
hashlib.sha1(key.encode() + WS_KEY).digest()
).decode()
response_headers = CIMultiDict( # type: ignore
response_headers = CIMultiDict(
{
hdrs.UPGRADE: "websocket", # type: ignore
hdrs.UPGRADE: "websocket",
hdrs.CONNECTION: "upgrade",
hdrs.SEC_WEBSOCKET_ACCEPT: accept_val,
}

@@ -216,7 +219,12 @@ class WebSocketResponse(StreamResponse):

if protocol:
response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol
return (response_headers, protocol, compress, notakeover) # type: ignore
return (
response_headers,
protocol,
compress,
notakeover,
) # type: ignore[return-value]

def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]:
self._loop = request._loop

@@ -245,7 +253,7 @@ class WebSocketResponse(StreamResponse):

loop = self._loop
assert loop is not None
self._reader = FlowControlDataQueue(request._protocol, 2 ** 16, loop=loop)
self._reader = FlowControlDataQueue(request._protocol, 2**16, loop=loop)
request.protocol.set_parser(
WebSocketReader(self._reader, self._max_msg_size, compress=self._compress)
)
@@ -315,7 +323,7 @@ class WebSocketResponse(StreamResponse):
) -> None:
await self.send_str(dumps(data), compress=compress)

async def write_eof(self) -> None: # type: ignore
async def write_eof(self) -> None: # type: ignore[override]
if self._eof_sent:
return
if self._payload_writer is None:

@@ -324,7 +332,7 @@ class WebSocketResponse(StreamResponse):
await self.close()
self._eof_sent = True

async def close(self, *, code: int = 1000, message: bytes = b"") -> bool:
async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
if self._writer is None:
raise RuntimeError("Call .prepare() first")

@@ -346,10 +354,10 @@ class WebSocketResponse(StreamResponse):
assert writer is not None
await writer.drain()
except (asyncio.CancelledError, asyncio.TimeoutError):
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
raise
except Exception as exc:
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = exc
return True

@@ -359,13 +367,13 @@ class WebSocketResponse(StreamResponse):
reader = self._reader
assert reader is not None
try:
with async_timeout.timeout(self._timeout, loop=self._loop):
async with async_timeout.timeout(self._timeout):
msg = await reader.read()
except asyncio.CancelledError:
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
raise
except Exception as exc:
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = exc
return True

@@ -373,7 +381,7 @@ class WebSocketResponse(StreamResponse):
self._close_code = msg.data
return True

self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = asyncio.TimeoutError()
return True
else:

@@ -400,9 +408,7 @@ class WebSocketResponse(StreamResponse):
try:
self._waiting = loop.create_future()
try:
with async_timeout.timeout(
timeout or self._receive_timeout, loop=self._loop
):
async with async_timeout.timeout(timeout or self._receive_timeout):
msg = await self._reader.read()
self._reset_heartbeat()
finally:

@@ -410,10 +416,10 @@ class WebSocketResponse(StreamResponse):
set_result(waiter, True)
self._waiting = None
except (asyncio.CancelledError, asyncio.TimeoutError):
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
raise
except EofStream:
self._close_code = 1000
self._close_code = WSCloseCode.OK
await self.close()
return WSMessage(WSMsgType.CLOSED, None, None)
except WebSocketError as exc:

@@ -423,7 +429,7 @@ class WebSocketResponse(StreamResponse):
except Exception as exc:
self._exception = exc
self._closing = True
self._close_code = 1006
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
await self.close()
return WSMessage(WSMsgType.ERROR, exc, None)

@@ -450,13 +456,13 @@ class WebSocketResponse(StreamResponse):
msg.type, msg.data
)
)
return msg.data
return cast(str, msg.data)

async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
msg = await self.receive(timeout)
if msg.type != WSMsgType.BINARY:
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
return msg.data
return cast(bytes, msg.data)

async def receive_json(
self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None

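Note: the magic close codes 1000/1006 are replaced by WSCloseCode members throughout the hunks above, and async_timeout is now entered with "async with". A hedged sketch of checking the code server-side (handler details invented):

    from aiohttp import WSCloseCode, WSMsgType, web

    async def ws_handler(request: web.Request) -> web.WebSocketResponse:
        ws = web.WebSocketResponse(heartbeat=30.0)
        await ws.prepare(request)
        async for msg in ws:
            if msg.type == WSMsgType.TEXT:
                await ws.send_str(msg.data)  # simple echo
        if ws.close_code == WSCloseCode.ABNORMAL_CLOSURE:
            request.app.logger.warning("peer went away without a close frame")
        return ws
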
@@ -22,14 +22,14 @@ try:

SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = None # type: ignore
SSLContext = object # type: ignore
ssl = None # type: ignore[assignment]
SSLContext = object # type: ignore[misc,assignment]


__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker", "GunicornTokioWebWorker")


class GunicornWebWorker(base.Worker):
class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]

DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default

@@ -37,9 +37,9 @@ class GunicornWebWorker(base.Worker):
def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover
super().__init__(*args, **kw)

self._task = None # type: Optional[asyncio.Task[None]]
self._task: Optional[asyncio.Task[None]] = None
self.exit_code = 0
self._notify_waiter = None # type: Optional[asyncio.Future[bool]]
self._notify_waiter: Optional[asyncio.Future[bool]] = None

def init_process(self) -> None:
# create new event_loop after fork

@@ -57,30 +57,39 @@ class GunicornWebWorker(base.Worker):
self.loop.run_until_complete(self._task)
except Exception:
self.log.exception("Exception in gunicorn worker")
if sys.version_info >= (3, 6):
self.loop.run_until_complete(self.loop.shutdown_asyncgens())
self.loop.run_until_complete(self.loop.shutdown_asyncgens())
self.loop.close()

sys.exit(self.exit_code)

async def _run(self) -> None:
runner = None
if isinstance(self.wsgi, Application):
app = self.wsgi
elif asyncio.iscoroutinefunction(self.wsgi):
app = await self.wsgi()
wsgi = await self.wsgi()
if isinstance(wsgi, web.AppRunner):
runner = wsgi
app = runner.app
else:
app = wsgi
else:
raise RuntimeError(
"wsgi app should be either Application or "
"async function returning Application, got {}".format(self.wsgi)
)
access_log = self.log.access_log if self.cfg.accesslog else None
runner = web.AppRunner(
app,
logger=self.log,
keepalive_timeout=self.cfg.keepalive,
access_log=access_log,
access_log_format=self._get_valid_log_format(self.cfg.access_log_format),
)

if runner is None:
access_log = self.log.access_log if self.cfg.accesslog else None
runner = web.AppRunner(
app,
logger=self.log,
keepalive_timeout=self.cfg.keepalive,
access_log=access_log,
access_log_format=self._get_valid_log_format(
self.cfg.access_log_format
),
)
await runner.setup()

ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None

@@ -101,7 +110,7 @@ class GunicornWebWorker(base.Worker):
# If our parent changed then we shut down.
pid = os.getpid()
try:
while self.alive: # type: ignore
while self.alive: # type: ignore[has-type]
self.notify()

cnt = server.requests_count

@@ -171,6 +180,14 @@ class GunicornWebWorker(base.Worker):
# by interrupting system calls
signal.siginterrupt(signal.SIGTERM, False)
signal.siginterrupt(signal.SIGUSR1, False)
# Reset signals so Gunicorn doesn't swallow subprocess return codes
# See: https://github.com/aio-libs/aiohttp/issues/6130
if sys.version_info < (3, 8):
# Starting from Python 3.8,
# the default child watcher is ThreadedChildWatcher.
# The watcher doesn't depend on SIGCHLD signal,
# there is no need to reset it.
signal.signal(signal.SIGCHLD, signal.SIG_DFL)

def handle_quit(self, sig: int, frame: FrameType) -> None:
self.alive = False

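Note: the _run() rework above lets the Gunicorn worker accept an async factory returning a pre-built web.AppRunner as well as a plain Application. A hedged sketch of such a factory (module and handler names invented):

    from aiohttp import web

    async def index(request: web.Request) -> web.Response:
        return web.Response(text="ok")

    async def app_factory() -> web.AppRunner:
        app = web.Application()
        app.router.add_get("/", index)
        # returning a runner keeps its construction under our control;
        # the worker then skips building its default AppRunner
        return web.AppRunner(app, handle_signals=False)

    # e.g.: gunicorn mymodule:app_factory --worker-class aiohttp.GunicornWebWorker
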
@@ -43,8 +43,8 @@ async def listen_to_redis(app):
print("Redis connection closed.")


async def start_background_tasks(app):
app["redis_listener"] = app.loop.create_task(listen_to_redis(app))
async def start_background_tasks(app: web.Application) -> None:
app["redis_listener"] = asyncio.create_task(listen_to_redis(app))


async def cleanup_background_tasks(app):

@@ -4,7 +4,7 @@ import asyncio
import aiohttp


async def fetch(session):
async def fetch(session: aiohttp.ClientSession) -> None:
print("Query http://httpbin.org/get")
async with session.get("http://httpbin.org/get") as resp:
print(resp.status)

@@ -44,8 +44,9 @@ async def start_client(loop, url):
break

# send request
async with aiohttp.ws_connect(url, autoclose=False, autoping=False) as ws:
await dispatch()
async with aiohttp.ClientSession() as session:
async with session.ws_connect(url, autoclose=False, autoping=False) as ws:
await dispatch()


ARGS = argparse.ArgumentParser(

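Note: the example hunk above switches from app.loop.create_task to asyncio.create_task, which works because on_startup callbacks already run inside the application's loop. A sketch of the full start/cleanup pairing under that assumption (task body invented):

    import asyncio
    from aiohttp import web

    async def tick(app: web.Application) -> None:
        while True:
            await asyncio.sleep(60)

    async def start_background_tasks(app: web.Application) -> None:
        app["ticker"] = asyncio.create_task(tick(app))

    async def cleanup_background_tasks(app: web.Application) -> None:
        app["ticker"].cancel()
        try:
            await app["ticker"]
        except asyncio.CancelledError:
            pass

    app = web.Application()
    app.on_startup.append(start_background_tasks)
    app.on_cleanup.append(cleanup_background_tasks)
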
@@ -6,11 +6,12 @@ import ssl

import aiohttp
from aiohttp import web
from aiohttp.abc import AbstractResolver
from aiohttp.resolver import DefaultResolver
from aiohttp.test_utils import unused_port


class FakeResolver:
class FakeResolver(AbstractResolver):
_LOCAL_HOST = {0: "127.0.0.1", socket.AF_INET: "127.0.0.1", socket.AF_INET6: "::1"}

def __init__(self, fakes, *, loop):

@@ -34,6 +35,9 @@ class FakeResolver:
else:
return await self._resolver.resolve(host, port, family)

async def close(self) -> None:
self._resolver.close()


class FakeFacebook:
def __init__(self, *, loop):

@@ -45,7 +49,7 @@ class FakeFacebook:
web.get("/v2.7/me/friends", self.on_my_friends),
]
)
self.runner = None
self.runner = web.AppRunner(self.app)
here = pathlib.Path(__file__)
ssl_cert = here.parent / "server.crt"
ssl_key = here.parent / "server.key"

@@ -1,108 +0,0 @@
#!/usr/bin/env python3

import asyncio
import logging
import re
import signal
import sys
import urllib.parse

import aiohttp


class Crawler:
def __init__(self, rooturl, loop, maxtasks=100):
self.rooturl = rooturl
self.loop = loop
self.todo = set()
self.busy = set()
self.done = {}
self.tasks = set()
self.sem = asyncio.Semaphore(maxtasks, loop=loop)

# connector stores cookies between requests and uses connection pool
self.session = aiohttp.ClientSession(loop=loop)

async def run(self):
t = asyncio.ensure_future(self.addurls([(self.rooturl, "")]), loop=self.loop)
await asyncio.sleep(1, loop=self.loop)
while self.busy:
await asyncio.sleep(1, loop=self.loop)

await t
await self.session.close()
self.loop.stop()

async def addurls(self, urls):
for url, parenturl in urls:
url = urllib.parse.urljoin(parenturl, url)
url, frag = urllib.parse.urldefrag(url)
if (
url.startswith(self.rooturl)
and url not in self.busy
and url not in self.done
and url not in self.todo
):
self.todo.add(url)
await self.sem.acquire()
task = asyncio.ensure_future(self.process(url), loop=self.loop)
task.add_done_callback(lambda t: self.sem.release())
task.add_done_callback(self.tasks.remove)
self.tasks.add(task)

async def process(self, url):
print("processing:", url)

self.todo.remove(url)
self.busy.add(url)
try:
resp = await self.session.get(url)
except Exception as exc:
print("...", url, "has error", repr(str(exc)))
self.done[url] = False
else:
if resp.status == 200 and ("text/html" in resp.headers.get("content-type")):
data = (await resp.read()).decode("utf-8", "replace")
urls = re.findall(r'(?i)href=["\']?([^\s"\'<>]+)', data)
asyncio.Task(self.addurls([(u, url) for u in urls]))

resp.close()
self.done[url] = True

self.busy.remove(url)
print(
len(self.done),
"completed tasks,",
len(self.tasks),
"still pending, todo",
len(self.todo),
)


def main():
loop = asyncio.get_event_loop()

c = Crawler(sys.argv[1], loop)
asyncio.ensure_future(c.run(), loop=loop)

try:
loop.add_signal_handler(signal.SIGINT, loop.stop)
except RuntimeError:
pass
loop.run_forever()
print("todo:", len(c.todo))
print("busy:", len(c.busy))
print("done:", len(c.done), "; ok:", sum(c.done.values()))
print("tasks:", len(c.tasks))


if __name__ == "__main__":
if "--iocp" in sys.argv:
from asyncio import events, windows_events

sys.argv.remove("--iocp")
logging.info("using iocp")
el = windows_events.ProactorEventLoop()
events.set_event_loop(el)

main()
@@ -1,178 +0,0 @@
#!/usr/bin/env python3
"""Simple server written using an event loop."""

import argparse
import asyncio
import logging
import os
import sys

import aiohttp
import aiohttp.server

try:
import ssl
except ImportError: # pragma: no cover
ssl = None


class HttpRequestHandler(aiohttp.server.ServerHttpProtocol):
async def handle_request(self, message, payload):
print(
"method = {!r}; path = {!r}; version = {!r}".format(
message.method, message.path, message.version
)
)

path = message.path

if not (path.isprintable() and path.startswith("/")) or "/." in path:
print("bad path", repr(path))
path = None
else:
path = "." + path
if not os.path.exists(path):
print("no file", repr(path))
path = None
else:
isdir = os.path.isdir(path)

if not path:
raise aiohttp.HttpProcessingError(code=404)

for hdr, val in message.headers.items():
print(hdr, val)

if isdir and not path.endswith("/"):
path = path + "/"
raise aiohttp.HttpProcessingError(
code=302, headers=(("URI", path), ("Location", path))
)

response = aiohttp.Response(self.writer, 200, http_version=message.version)
response.add_header("Transfer-Encoding", "chunked")

# content encoding
accept_encoding = message.headers.get("accept-encoding", "").lower()
if "deflate" in accept_encoding:
response.add_header("Content-Encoding", "deflate")
response.add_compression_filter("deflate")
elif "gzip" in accept_encoding:
response.add_header("Content-Encoding", "gzip")
response.add_compression_filter("gzip")

response.add_chunking_filter(1025)

if isdir:
response.add_header("Content-type", "text/html")
response.send_headers()

response.write(b"<ul>\r\n")
for name in sorted(os.listdir(path)):
if name.isprintable() and not name.startswith("."):
try:
bname = name.encode("ascii")
except UnicodeError:
pass
else:
if os.path.isdir(os.path.join(path, name)):
response.write(
b'<li><a href="'
+ bname
+ b'/">'
+ bname
+ b"/</a></li>\r\n"
)
else:
response.write(
b'<li><a href="'
+ bname
+ b'">'
+ bname
+ b"</a></li>\r\n"
)
response.write(b"</ul>")
else:
response.add_header("Content-type", "text/plain")
response.send_headers()

try:
with open(path, "rb") as fp:
chunk = fp.read(8192)
while chunk:
response.write(chunk)
chunk = fp.read(8192)
except OSError:
response.write(b"Cannot open")

await response.write_eof()
if response.keep_alive():
self.keep_alive(True)


ARGS = argparse.ArgumentParser(description="Run simple HTTP server.")
ARGS.add_argument(
"--host", action="store", dest="host", default="127.0.0.1", help="Host name"
)
ARGS.add_argument(
"--port", action="store", dest="port", default=8080, type=int, help="Port number"
)
# make iocp and ssl mutually exclusive because ProactorEventLoop is
# incompatible with SSL
group = ARGS.add_mutually_exclusive_group()
group.add_argument(
"--iocp", action="store_true", dest="iocp", help="Windows IOCP event loop"
)
group.add_argument("--ssl", action="store_true", dest="ssl", help="Run ssl mode.")
ARGS.add_argument("--sslcert", action="store", dest="certfile", help="SSL cert file.")
ARGS.add_argument("--sslkey", action="store", dest="keyfile", help="SSL key file.")


def main():
args = ARGS.parse_args()

if ":" in args.host:
args.host, port = args.host.split(":", 1)
args.port = int(port)

if args.iocp:
from asyncio import windows_events

sys.argv.remove("--iocp")
logging.info("using iocp")
el = windows_events.ProactorEventLoop()
asyncio.set_event_loop(el)

if args.ssl:
here = os.path.join(os.path.dirname(__file__), "tests")

if args.certfile:
certfile = args.certfile or os.path.join(here, "sample.crt")
keyfile = args.keyfile or os.path.join(here, "sample.key")
else:
certfile = os.path.join(here, "sample.crt")
keyfile = os.path.join(here, "sample.key")

sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext.load_cert_chain(certfile, keyfile)
else:
sslcontext = None

loop = asyncio.get_event_loop()
f = loop.create_server(
lambda: HttpRequestHandler(debug=True, keep_alive=75),
args.host,
args.port,
ssl=sslcontext,
)
svr = loop.run_until_complete(f)
socks = svr.sockets
print("serving on", socks[0].getsockname())
try:
loop.run_forever()
except KeyboardInterrupt:
pass


if __name__ == "__main__":
main()
@@ -1,172 +0,0 @@
#!/usr/bin/env python3
"""Protocol parser example."""
import argparse
import asyncio
import collections

import aiohttp

try:
    import signal
except ImportError:
    signal = None


MSG_TEXT = b"text:"
MSG_PING = b"ping:"
MSG_PONG = b"pong:"
MSG_STOP = b"stop:"

Message = collections.namedtuple("Message", ("tp", "data"))


def my_protocol_parser(out, buf):
    """Parser is used with StreamParser for incremental protocol parsing.
    Parser is a generator function, but it is not a coroutine. Usually
    parsers are implemented as a state machine.

    more details in asyncio/parsers.py
    existing parsers:
    * HTTP protocol parsers asyncio/http/protocol.py
    * websocket parser asyncio/http/websocket.py
    """
    while True:
        tp = yield from buf.read(5)
        if tp in (MSG_PING, MSG_PONG):
            # skip line
            yield from buf.skipuntil(b"\r\n")
            out.feed_data(Message(tp, None))
        elif tp == MSG_STOP:
            out.feed_data(Message(tp, None))
        elif tp == MSG_TEXT:
            # read text
            text = yield from buf.readuntil(b"\r\n")
            out.feed_data(Message(tp, text.strip().decode("utf-8")))
        else:
            raise ValueError("Unknown protocol prefix.")


class MyProtocolWriter:
    def __init__(self, transport):
        self.transport = transport

    def ping(self):
        self.transport.write(b"ping:\r\n")

    def pong(self):
        self.transport.write(b"pong:\r\n")

    def stop(self):
        self.transport.write(b"stop:\r\n")

    def send_text(self, text):
        self.transport.write(f"text:{text.strip()}\r\n".encode("utf-8"))


class EchoServer(asyncio.Protocol):
    def connection_made(self, transport):
        print("Connection made")
        self.transport = transport
        self.stream = aiohttp.StreamParser()
        asyncio.Task(self.dispatch())

    def data_received(self, data):
        self.stream.feed_data(data)

    def eof_received(self):
        self.stream.feed_eof()

    def connection_lost(self, exc):
        print("Connection lost")

    async def dispatch(self):
        reader = self.stream.set_parser(my_protocol_parser)
        writer = MyProtocolWriter(self.transport)

        while True:
            try:
                msg = await reader.read()
            except aiohttp.ConnectionError:
                # client has been disconnected
                break

            print(f"Message received: {msg}")

            if msg.type == MSG_PING:
                writer.pong()
            elif msg.type == MSG_TEXT:
                writer.send_text("Re: " + msg.data)
            elif msg.type == MSG_STOP:
                self.transport.close()
                break


async def start_client(loop, host, port):
    transport, stream = await loop.create_connection(aiohttp.StreamProtocol, host, port)
    reader = stream.reader.set_parser(my_protocol_parser)
    writer = MyProtocolWriter(transport)
    writer.ping()

    message = "This is the message. It will be echoed."

    while True:
        try:
            msg = await reader.read()
        except aiohttp.ConnectionError:
            print("Server has been disconnected.")
            break

        print(f"Message received: {msg}")
        if msg.type == MSG_PONG:
            writer.send_text(message)
            print("data sent:", message)
        elif msg.type == MSG_TEXT:
            writer.stop()
            print("stop sent")
            break

    transport.close()


def start_server(loop, host, port):
    f = loop.create_server(EchoServer, host, port)
    srv = loop.run_until_complete(f)
    x = srv.sockets[0]
    print("serving on", x.getsockname())
    loop.run_forever()


ARGS = argparse.ArgumentParser(description="Protocol parser example.")
ARGS.add_argument(
    "--server", action="store_true", dest="server", default=False, help="Run tcp server"
)
ARGS.add_argument(
    "--client", action="store_true", dest="client", default=False, help="Run tcp client"
)
ARGS.add_argument(
    "--host", action="store", dest="host", default="127.0.0.1", help="Host name"
)
ARGS.add_argument(
    "--port", action="store", dest="port", default=9999, type=int, help="Port number"
)


if __name__ == "__main__":
    args = ARGS.parse_args()

    if ":" in args.host:
        args.host, port = args.host.split(":", 1)
        args.port = int(port)

    if (not (args.server or args.client)) or (args.server and args.client):
        print("Please specify --server or --client\n")
        ARGS.print_help()
    else:
        loop = asyncio.get_event_loop()
        if signal is not None:
            loop.add_signal_handler(signal.SIGINT, loop.stop)

        if args.server:
            start_server(loop, args.host, args.port)
        else:
            loop.run_until_complete(start_client(loop, args.host, args.port))
@@ -1,6 +1,5 @@
 #!/usr/bin/env python3
-"""Example for aiohttp.web class based views
-"""
+"""Example for aiohttp.web class based views."""
 
 
 import functools
@@ -14,7 +13,7 @@ class MyView(web.View):
         return web.json_response(
             {
                 "method": "get",
-                "args": dict(self.request.GET),
+                "args": dict(self.request.query),
                 "headers": dict(self.request.headers),
             },
             dumps=functools.partial(json.dumps, indent=4),
@@ -25,7 +24,7 @@ class MyView(web.View):
         return web.json_response(
             {
                 "method": "post",
-                "args": dict(self.request.GET),
+                "args": dict(self.request.query),
                 "data": dict(data),
                 "headers": dict(self.request.headers),
             },
@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
-"""Example for aiohttp.web basic server with cookies.
-"""
+"""Example for aiohttp.web basic server with cookies."""
 
 from pprint import pformat
+from typing import NoReturn
 
 from aiohttp import web
 
@@ -22,20 +22,20 @@ async def root(request):
     return resp
 
 
-async def login(request):
-    resp = web.HTTPFound(location="/")
-    resp.set_cookie("AUTH", "secret")
-    return resp
+async def login(request: web.Request) -> NoReturn:
+    exc = web.HTTPFound(location="/")
+    exc.set_cookie("AUTH", "secret")
+    raise exc
 
 
-async def logout(request):
-    resp = web.HTTPFound(location="/")
-    resp.del_cookie("AUTH")
-    return resp
+async def logout(request: web.Request) -> NoReturn:
+    exc = web.HTTPFound(location="/")
+    exc.del_cookie("AUTH")
+    raise exc
 
 
-def init(loop):
-    app = web.Application(loop=loop)
+def init():
+    app = web.Application()
     app.router.add_get("/", root)
     app.router.add_get("/login", login)
     app.router.add_get("/logout", logout)
@@ -1,9 +1,8 @@
 #!/usr/bin/env python3
-"""
-Example for rewriting response headers by middleware.
-"""
+"""Example for rewriting response headers by middleware."""
 
 from aiohttp import web
+from aiohttp.typedefs import Handler
 
 
 async def handler(request):
@@ -11,7 +10,7 @@ async def handler(request):
 
 
 @web.middleware
-async def middleware(request, handler):
+async def middleware(request: web.Request, handler: Handler) -> web.StreamResponse:
     try:
         response = await handler(request)
     except web.HTTPException as exc:
@@ -1,6 +1,5 @@
 #!/usr/bin/env python3
-"""Example for aiohttp.web basic server
-"""
+"""Example for aiohttp.web basic server."""
 
 import textwrap
 
@@ -1,7 +1,5 @@
 #!/usr/bin/env python3
-"""Example for aiohttp.web basic server
-with decorator definition for routes
-"""
+"""Example for aiohttp.web basic server with decorator definition for routes."""
 
 import textwrap
 
@@ -1,7 +1,5 @@
 #!/usr/bin/env python3
-"""Example for aiohttp.web basic server
-with table definition for routes
-"""
+"""Example for aiohttp.web basic server with table definition for routes."""
 
 import textwrap
 
@@ -1,6 +1,5 @@
 #!/usr/bin/env python3
-"""Example for aiohttp.web websocket server
-"""
+"""Example for aiohttp.web websocket server."""
 
 import os
 
@@ -1,3 +1,9 @@
+[build-system]
+requires = [
+    "setuptools >= 46.4.0",
+]
+build-backend = "setuptools.build_meta"
+
 [tool.towncrier]
 package = "aiohttp"
 filename = "CHANGES.rst"
@@ -5,3 +11,9 @@ directory = "CHANGES/"
 title_format = "{version} ({project_date})"
 template = "CHANGES/.TEMPLATE.rst"
 issue_format = "`#{issue} <https://github.com/aio-libs/aiohttp/issues/{issue}>`_"
+
+
+[tool.cibuildwheel]
+test-command = ""
+# don't build PyPy wheels, install from source instead
+skip = "pp*"
@@ -1,8 +1,81 @@
-[aliases]
-test = pytest
-
 [metadata]
-license_file = LICENSE.txt
+name = aiohttp
+version = attr: aiohttp.__version__
+url = https://github.com/aio-libs/aiohttp
+project_urls =
+  Chat: Matrix = https://matrix.to/#/#aio-libs:matrix.org
+  Chat: Matrix Space = https://matrix.to/#/#aio-libs-space:matrix.org
+  CI: GitHub Actions = https://github.com/aio-libs/aiohttp/actions?query=workflow%%3ACI
+  Coverage: codecov = https://codecov.io/github/aio-libs/aiohttp
+  Docs: Changelog = https://docs.aiohttp.org/en/stable/changes.html
+  Docs: RTD = https://docs.aiohttp.org
+  GitHub: issues = https://github.com/aio-libs/aiohttp/issues
+  GitHub: repo = https://github.com/aio-libs/aiohttp
+description = Async http client/server framework (asyncio)
+long_description = file: README.rst
+long_description_content_type = text/x-rst
+maintainer = aiohttp team <team@aiohttp.org>
+maintainer_email = team@aiohttp.org
+license = Apache 2
+license_files = LICENSE.txt
+classifiers =
+  Development Status :: 5 - Production/Stable
+
+  Framework :: AsyncIO
+
+  Intended Audience :: Developers
+
+  License :: OSI Approved :: Apache Software License
+
+  Operating System :: POSIX
+  Operating System :: MacOS :: MacOS X
+  Operating System :: Microsoft :: Windows
+
+  Programming Language :: Python
+  Programming Language :: Python :: 3
+  Programming Language :: Python :: 3.6
+  Programming Language :: Python :: 3.7
+  Programming Language :: Python :: 3.8
+  Programming Language :: Python :: 3.9
+  Programming Language :: Python :: 3.10
+
+  Topic :: Internet :: WWW/HTTP
+
+[options]
+python_requires = >=3.6
+packages = find:
+zip_safe = False
+include_package_data = True
+install_requires =
+  attrs >= 17.3.0
+  charset-normalizer >=2.0, < 4.0
+  multidict >=4.5, < 7.0
+  async_timeout >= 4.0.0a3, < 5.0
+  asynctest == 0.13.0; python_version<"3.8"
+  yarl >= 1.0, < 2.0
+  idna-ssl >= 1.0; python_version<"3.7"
+  typing_extensions >= 3.7.4; python_version<"3.8"
+  frozenlist >= 1.1.1
+  aiosignal >= 1.1.2
+
+[options.exclude_package_data]
+* =
+    *.c
+    *.h
+
+[options.extras_require]
+speedups =
+    aiodns
+    Brotli
+    cchardet; python_version < "3.10"
+
+[options.packages.find]
+exclude =
+    examples
+
+[options.package_data]
+* =
+    *.so
 
 [pep8]
 max-line-length = 79
@@ -11,7 +84,7 @@ max-line-length = 79
 zip_ok = false
 
 [flake8]
-ignore = N801,N802,N803,E203,E226,E305,W504,E252,E301,E302,E704,W503,W504,F811
+ignore = N801,N802,N803,E203,E226,E305,W504,E252,E301,E302,E704,W503,W504,F811,D1,D4
 max-line-length = 88
 
 [isort]
@@ -34,10 +107,27 @@ source = aiohttp, tests
 omit = site-packages
 
 [tool:pytest]
-addopts = --cov=aiohttp -v -rxXs --durations 10
+addopts =
+    --durations=10
+    -v
+    -ra
+    --showlocals
+    --cov=aiohttp
+    --cov=tests/
 filterwarnings =
     error
     ignore:module 'ssl' has no attribute 'OP_NO_COMPRESSION'. The Python interpreter is compiled against OpenSSL < 1.0.0. Ref. https.//docs.python.org/3/library/ssl.html#ssl.OP_NO_COMPRESSION:UserWarning
     ignore:unclosed transport <asyncio.sslproto._SSLProtocolTransport object.*:ResourceWarning
     ignore:unclosed transport <_ProactorSocketTransport closing fd=-1>:ResourceWarning
     ignore:Unclosed client session <aiohttp.client.ClientSession object at 0x:ResourceWarning
+    ignore:The loop argument is deprecated:DeprecationWarning:asyncio
+    ignore:Creating a LegacyVersion has been deprecated and will be removed in the next major release:DeprecationWarning::
+    ignore:module 'sre_constants' is deprecated:DeprecationWarning:pkg_resources._vendor.pyparsing
+    ignore:path is deprecated. Use files.. instead. Refer to https.//importlib-resources.readthedocs.io/en/latest/using.html#migrating-from-legacy for migration advice.:DeprecationWarning:certifi.core
 junit_suite_name = aiohttp_test_suite
 norecursedirs = dist docs build .tox .eggs
 minversion = 3.8.2
@@ -45,48 +135,6 @@ testpaths = tests/
 junit_family = xunit2
 xfail_strict = true
 
-[mypy]
-follow_imports = silent
-strict_optional = True
-warn_redundant_casts = True
-warn_unused_ignores = True
-check_untyped_defs = True
-disallow_any_generics = True
-disallow_untyped_defs = True
-
-[mypy-pytest]
-ignore_missing_imports = true
-
-[mypy-uvloop]
-ignore_missing_imports = true
-
-[mypy-tokio]
-ignore_missing_imports = true
-
-[mypy-async_generator]
-ignore_missing_imports = true
-
-[mypy-aiodns]
-ignore_missing_imports = true
-
-[mypy-gunicorn.config]
-ignore_missing_imports = true
-
-[mypy-gunicorn.workers]
-ignore_missing_imports = true
-
-[mypy-brotli]
-ignore_missing_imports = true
-
-[mypy-chardet]
-ignore_missing_imports = true
-
-[mypy-cchardet]
-ignore_missing_imports = true
-
-[mypy-idna_ssl]
-ignore_missing_imports = true
-
-[egg_info]
-tag_build =
-tag_date = 0
@@ -1,18 +1,23 @@
 import os
 import pathlib
 import re
 import sys
-from distutils.command.build_ext import build_ext
-from distutils.errors import CCompilerError, DistutilsExecError, DistutilsPlatformError
 
 from setuptools import Extension, setup
 
 if sys.version_info < (3, 6):
-    raise RuntimeError("aiohttp 3.7+ requires Python 3.6+")
+    raise RuntimeError("aiohttp 3.x requires Python 3.6+")
 
-here = pathlib.Path(__file__).parent
 
-if (here / ".git").exists() and not (here / "vendor/http-parser/README.md").exists():
+NO_EXTENSIONS: bool = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))
+HERE = pathlib.Path(__file__).parent
+IS_GIT_REPO = (HERE / ".git").exists()
+
+
+if sys.implementation.name != "cpython":
+    NO_EXTENSIONS = True
+
+
+if IS_GIT_REPO and not (HERE / "vendor/llhttp/README.md").exists():
     print("Install submodules when building from git clone", file=sys.stderr)
     print("Hint:", file=sys.stderr)
     print(" git submodule update --init", file=sys.stderr)
@@ -27,133 +32,23 @@ extensions = [
         "aiohttp._http_parser",
         [
             "aiohttp/_http_parser.c",
-            "vendor/http-parser/http_parser.c",
             "aiohttp/_find_header.c",
+            "vendor/llhttp/build/c/llhttp.c",
+            "vendor/llhttp/src/native/api.c",
+            "vendor/llhttp/src/native/http.c",
         ],
-        define_macros=[("HTTP_PARSER_STRICT", 0)],
+        define_macros=[("LLHTTP_STRICT_MODE", 0)],
+        include_dirs=["vendor/llhttp/build"],
     ),
-    Extension("aiohttp._frozenlist", ["aiohttp/_frozenlist.c"]),
     Extension("aiohttp._helpers", ["aiohttp/_helpers.c"]),
     Extension("aiohttp._http_writer", ["aiohttp/_http_writer.c"]),
 ]
 
 
-class BuildFailed(Exception):
-    pass
+build_type = "Pure" if NO_EXTENSIONS else "Accelerated"
+setup_kwargs = {} if NO_EXTENSIONS else {"ext_modules": extensions}
 
-
-class ve_build_ext(build_ext):
-    # This class allows C extension building to fail.
-
-    def run(self):
-        try:
-            build_ext.run(self)
-        except (DistutilsPlatformError, FileNotFoundError):
-            raise BuildFailed()
-
-    def build_extension(self, ext):
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsExecError, DistutilsPlatformError, ValueError):
-            raise BuildFailed()
-
-
-txt = (here / "aiohttp" / "__init__.py").read_text("utf-8")
-try:
-    version = re.findall(r'^__version__ = "([^"]+)"\r?$', txt, re.M)[0]
-except IndexError:
-    raise RuntimeError("Unable to determine version.")
-
-install_requires = [
-    "attrs>=17.3.0",
-    "chardet>=2.0,<5.0",
-    "multidict>=4.5,<7.0",
-    "async_timeout>=3.0,<4.0",
-    "yarl>=1.0,<2.0",
-    'idna-ssl>=1.0; python_version<"3.7"',
-    "typing_extensions>=3.6.5",
-]
-
-
-def read(f):
-    return (here / f).read_text("utf-8").strip()
-
-
-NEEDS_PYTEST = {"pytest", "test"}.intersection(sys.argv)
-pytest_runner = ["pytest-runner"] if NEEDS_PYTEST else []
-
-tests_require = [
-    "pytest",
-    "gunicorn",
-    "pytest-timeout",
-    "async-generator",
-    "pytest-xdist",
-]
-
-
-args = dict(
-    name="aiohttp",
-    version=version,
-    description="Async http client/server framework (asyncio)",
-    long_description="\n\n".join((read("README.rst"), read("CHANGES.rst"))),
-    classifiers=[
-        "License :: OSI Approved :: Apache Software License",
-        "Intended Audience :: Developers",
-        "Programming Language :: Python",
-        "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.6",
-        "Programming Language :: Python :: 3.7",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: 3.9",
-        "Development Status :: 5 - Production/Stable",
-        "Operating System :: POSIX",
-        "Operating System :: MacOS :: MacOS X",
-        "Operating System :: Microsoft :: Windows",
-        "Topic :: Internet :: WWW/HTTP",
-        "Framework :: AsyncIO",
-    ],
-    author="Nikolay Kim",
-    author_email="fafhrd91@gmail.com",
-    maintainer=", ".join(
-        (
-            "Nikolay Kim <fafhrd91@gmail.com>",
-            "Andrew Svetlov <andrew.svetlov@gmail.com>",
-        )
-    ),
-    maintainer_email="aio-libs@googlegroups.com",
-    url="https://github.com/aio-libs/aiohttp",
-    project_urls={
-        "Chat: Gitter": "https://gitter.im/aio-libs/Lobby",
-        "CI: Azure Pipelines": "https://dev.azure.com/aio-libs/aiohttp/_build",
-        "Coverage: codecov": "https://codecov.io/github/aio-libs/aiohttp",
-        "Docs: RTD": "https://docs.aiohttp.org",
-        "GitHub: issues": "https://github.com/aio-libs/aiohttp/issues",
-        "GitHub: repo": "https://github.com/aio-libs/aiohttp",
-    },
-    license="Apache 2",
-    packages=["aiohttp"],
-    python_requires=">=3.6",
-    install_requires=install_requires,
-    extras_require={
-        "speedups": [
-            "aiodns",
-            "brotlipy",
-            "cchardet",
-        ],
-    },
-    tests_require=tests_require,
-    setup_requires=pytest_runner,
-    include_package_data=True,
-    ext_modules=extensions,
-    cmdclass=dict(build_ext=ve_build_ext),
-)
-
-try:
-    setup(**args)
-except BuildFailed:
-    print("************************************************************")
-    print("Cannot compile C accelerator module, use pure python version")
-    print("************************************************************")
-    del args["ext_modules"]
-    del args["cmdclass"]
-    setup(**args)
+print("*********************", file=sys.stderr)
+print("* {build_type} build *".format_map(locals()), file=sys.stderr)
+print("*********************", file=sys.stderr)
+setup(**setup_kwargs)
@@ -0,0 +1,23 @@
LLHTTP
------

When building aiohttp from source, there is a pure Python parser used by default.
For better performance, you may want to build the higher performance C parser.

To build this ``llhttp`` parser, first get/update the submodules (to update to a
newer release, add ``--remote`` and check the branch in ``.gitmodules``)::

    git submodule update --init --recursive

Then build ``llhttp``::

    cd vendor/llhttp/
    npm install
    make

Then build our parser::

    cd -
    make cythonize

Then you can build or install it with ``python -m build`` or ``pip install -e .``
@@ -1,30 +0,0 @@
/out/
core
tags
*.o
test
test_g
test_fast
bench
url_parser
parsertrace
parsertrace_g
*.mk
*.Makefile
*.so.*
*.exe.*
*.exe
*.a


# Visual Studio uglies
*.suo
*.sln
*.vcxproj
*.vcxproj.filters
*.vcxproj.user
*.opensdf
*.ncrunchsolution*
*.sdf
*.vsp
*.psess
@@ -1,8 +0,0 @@
# update AUTHORS with:
#   git log --all --reverse --format='%aN <%aE>' | perl -ne 'BEGIN{print "# Authors ordered by first contribution.\n"} print unless $h{$_}; $h{$_} = 1' > AUTHORS
Ryan Dahl <ry@tinyclouds.org>
Salman Haq <salman.haq@asti-usa.com>
Simon Zimmermann <simonz05@gmail.com>
Thomas LE ROUX <thomas@november-eleven.fr> LE ROUX Thomas <thomas@procheo.fr>
Thomas LE ROUX <thomas@november-eleven.fr> Thomas LE ROUX <thomas@procheo.fr>
Fedor Indutny <fedor@indutny.com>
@@ -1,13 +0,0 @@
language: c

compiler:
  - clang
  - gcc

script:
  - "make"

notifications:
  email: false
  irc:
    - "irc.freenode.net#node-ci"
@@ -1,68 +0,0 @@
# Authors ordered by first contribution.
Ryan Dahl <ry@tinyclouds.org>
Jeremy Hinegardner <jeremy@hinegardner.org>
Sergey Shepelev <temotor@gmail.com>
Joe Damato <ice799@gmail.com>
tomika <tomika_nospam@freemail.hu>
Phoenix Sol <phoenix@burninglabs.com>
Cliff Frey <cliff@meraki.com>
Ewen Cheslack-Postava <ewencp@cs.stanford.edu>
Santiago Gala <sgala@apache.org>
Tim Becker <tim.becker@syngenio.de>
Jeff Terrace <jterrace@gmail.com>
Ben Noordhuis <info@bnoordhuis.nl>
Nathan Rajlich <nathan@tootallnate.net>
Mark Nottingham <mnot@mnot.net>
Aman Gupta <aman@tmm1.net>
Tim Becker <tim.becker@kuriositaet.de>
Sean Cunningham <sean.cunningham@mandiant.com>
Peter Griess <pg@std.in>
Salman Haq <salman.haq@asti-usa.com>
Cliff Frey <clifffrey@gmail.com>
Jon Kolb <jon@b0g.us>
Fouad Mardini <f.mardini@gmail.com>
Paul Querna <pquerna@apache.org>
Felix Geisendörfer <felix@debuggable.com>
koichik <koichik@improvement.jp>
Andre Caron <andre.l.caron@gmail.com>
Ivo Raisr <ivosh@ivosh.net>
James McLaughlin <jamie@lacewing-project.org>
David Gwynne <loki@animata.net>
Thomas LE ROUX <thomas@november-eleven.fr>
Randy Rizun <rrizun@ortivawireless.com>
Andre Louis Caron <andre.louis.caron@usherbrooke.ca>
Simon Zimmermann <simonz05@gmail.com>
Erik Dubbelboer <erik@dubbelboer.com>
Martell Malone <martellmalone@gmail.com>
Bertrand Paquet <bpaquet@octo.com>
BogDan Vatra <bogdan@kde.org>
Peter Faiman <peter@thepicard.org>
Corey Richardson <corey@octayn.net>
Tóth Tamás <tomika_nospam@freemail.hu>
Cam Swords <cam.swords@gmail.com>
Chris Dickinson <christopher.s.dickinson@gmail.com>
Uli Köhler <ukoehler@btronik.de>
Charlie Somerville <charlie@charliesomerville.com>
Patrik Stutz <patrik.stutz@gmail.com>
Fedor Indutny <fedor.indutny@gmail.com>
runner <runner.mei@gmail.com>
Alexis Campailla <alexis@janeasystems.com>
David Wragg <david@wragg.org>
Vinnie Falco <vinnie.falco@gmail.com>
Alex Butum <alexbutum@linux.com>
Rex Feng <rexfeng@gmail.com>
Alex Kocharin <alex@kocharin.ru>
Mark Koopman <markmontymark@yahoo.com>
Helge Heß <me@helgehess.eu>
Alexis La Goutte <alexis.lagoutte@gmail.com>
George Miroshnykov <george.miroshnykov@gmail.com>
Maciej Małecki <me@mmalecki.com>
Marc O'Morain <github.com@marcomorain.com>
Jeff Pinner <jpinner@twitter.com>
Timothy J Fontaine <tjfontaine@gmail.com>
Akagi201 <akagi201@gmail.com>
Romain Giraud <giraud.romain@gmail.com>
Jay Satiro <raysatiro@yahoo.com>
Arne Steen <Arne.Steen@gmx.de>
Kjell Schubert <kjell.schubert@gmail.com>
Olivier Mengué <dolmen@cpan.org>
@@ -1,160 +0,0 @@
# Copyright Joyent, Inc. and other Node contributors. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

PLATFORM ?= $(shell sh -c 'uname -s | tr "[A-Z]" "[a-z]"')
HELPER ?=
BINEXT ?=
SOLIBNAME = libhttp_parser
SOMAJOR = 2
SOMINOR = 9
SOREV = 4
ifeq (darwin,$(PLATFORM))
SOEXT ?= dylib
SONAME ?= $(SOLIBNAME).$(SOMAJOR).$(SOMINOR).$(SOEXT)
LIBNAME ?= $(SOLIBNAME).$(SOMAJOR).$(SOMINOR).$(SOREV).$(SOEXT)
else ifeq (wine,$(PLATFORM))
CC = winegcc
BINEXT = .exe.so
HELPER = wine
else
SOEXT ?= so
SONAME ?= $(SOLIBNAME).$(SOEXT).$(SOMAJOR).$(SOMINOR)
LIBNAME ?= $(SOLIBNAME).$(SOEXT).$(SOMAJOR).$(SOMINOR).$(SOREV)
endif

CC?=gcc
AR?=ar

CPPFLAGS ?=
LDFLAGS ?=

CPPFLAGS += -I.
CPPFLAGS_DEBUG = $(CPPFLAGS) -DHTTP_PARSER_STRICT=1
CPPFLAGS_DEBUG += $(CPPFLAGS_DEBUG_EXTRA)
CPPFLAGS_FAST = $(CPPFLAGS) -DHTTP_PARSER_STRICT=0
CPPFLAGS_FAST += $(CPPFLAGS_FAST_EXTRA)
CPPFLAGS_BENCH = $(CPPFLAGS_FAST)

CFLAGS += -Wall -Wextra -Werror
CFLAGS_DEBUG = $(CFLAGS) -O0 -g $(CFLAGS_DEBUG_EXTRA)
CFLAGS_FAST = $(CFLAGS) -O3 $(CFLAGS_FAST_EXTRA)
CFLAGS_BENCH = $(CFLAGS_FAST) -Wno-unused-parameter
CFLAGS_LIB = $(CFLAGS_FAST) -fPIC

LDFLAGS_LIB = $(LDFLAGS) -shared

INSTALL ?= install
PREFIX ?= /usr/local
LIBDIR = $(PREFIX)/lib
INCLUDEDIR = $(PREFIX)/include

ifeq (darwin,$(PLATFORM))
LDFLAGS_LIB += -Wl,-install_name,$(LIBDIR)/$(SONAME)
else
# TODO(bnoordhuis) The native SunOS linker expects -h rather than -soname...
LDFLAGS_LIB += -Wl,-soname=$(SONAME)
endif

test: test_g test_fast
	$(HELPER) ./test_g$(BINEXT)
	$(HELPER) ./test_fast$(BINEXT)

test_g: http_parser_g.o test_g.o
	$(CC) $(CFLAGS_DEBUG) $(LDFLAGS) http_parser_g.o test_g.o -o $@

test_g.o: test.c http_parser.h Makefile
	$(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) -c test.c -o $@

http_parser_g.o: http_parser.c http_parser.h Makefile
	$(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) -c http_parser.c -o $@

test_fast: http_parser.o test.o http_parser.h
	$(CC) $(CFLAGS_FAST) $(LDFLAGS) http_parser.o test.o -o $@

test.o: test.c http_parser.h Makefile
	$(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) -c test.c -o $@

bench: http_parser.o bench.o
	$(CC) $(CFLAGS_BENCH) $(LDFLAGS) http_parser.o bench.o -o $@

bench.o: bench.c http_parser.h Makefile
	$(CC) $(CPPFLAGS_BENCH) $(CFLAGS_BENCH) -c bench.c -o $@

http_parser.o: http_parser.c http_parser.h Makefile
	$(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) -c http_parser.c

test-run-timed: test_fast
	while(true) do time $(HELPER) ./test_fast$(BINEXT) > /dev/null; done

test-valgrind: test_g
	valgrind ./test_g

libhttp_parser.o: http_parser.c http_parser.h Makefile
	$(CC) $(CPPFLAGS_FAST) $(CFLAGS_LIB) -c http_parser.c -o libhttp_parser.o

library: libhttp_parser.o
	$(CC) $(LDFLAGS_LIB) -o $(LIBNAME) $<

package: http_parser.o
	$(AR) rcs libhttp_parser.a http_parser.o

url_parser: http_parser.o contrib/url_parser.c
	$(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) $^ -o $@

url_parser_g: http_parser_g.o contrib/url_parser.c
	$(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) $^ -o $@

parsertrace: http_parser.o contrib/parsertrace.c
	$(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) $^ -o parsertrace$(BINEXT)

parsertrace_g: http_parser_g.o contrib/parsertrace.c
	$(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) $^ -o parsertrace_g$(BINEXT)

tags: http_parser.c http_parser.h test.c
	ctags $^

install: library
	$(INSTALL) -D http_parser.h $(DESTDIR)$(INCLUDEDIR)/http_parser.h
	$(INSTALL) -D $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(LIBNAME)
	ln -sf $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SONAME)
	ln -sf $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SOLIBNAME).$(SOEXT)

install-strip: library
	$(INSTALL) -D http_parser.h $(DESTDIR)$(INCLUDEDIR)/http_parser.h
	$(INSTALL) -D -s $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(LIBNAME)
	ln -sf $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SONAME)
	ln -sf $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SOLIBNAME).$(SOEXT)

uninstall:
	rm $(DESTDIR)$(INCLUDEDIR)/http_parser.h
	rm $(DESTDIR)$(LIBDIR)/$(SOLIBNAME).$(SOEXT)
	rm $(DESTDIR)$(LIBDIR)/$(SONAME)
	rm $(DESTDIR)$(LIBDIR)/$(LIBNAME)

clean:
	rm -f *.o *.a tags test test_fast test_g \
		http_parser.tar libhttp_parser.so.* \
		url_parser url_parser_g parsertrace parsertrace_g \
		*.exe *.exe.so

contrib/url_parser.c: http_parser.h
contrib/parsertrace.c: http_parser.h

.PHONY: clean package test-run test-run-timed test-valgrind install install-strip uninstall
@@ -1,246 +0,0 @@
HTTP Parser
===========

[![Build Status](https://api.travis-ci.org/nodejs/http-parser.svg?branch=master)](https://travis-ci.org/nodejs/http-parser)

This is a parser for HTTP messages written in C. It parses both requests and
responses. The parser is designed to be used in high-performance HTTP
applications. It does not make any syscalls nor allocations, it does not
buffer data, and it can be interrupted at any time. Depending on your
architecture, it only requires about 40 bytes of data per message
stream (in a web server that is per connection).

Features:

* No dependencies
* Handles persistent streams (keep-alive).
* Decodes chunked encoding.
* Upgrade support
* Defends against buffer overflow attacks.

The parser extracts the following information from HTTP messages:

* Header fields and values
* Content-Length
* Request method
* Response status code
* Transfer-Encoding
* HTTP version
* Request URL
* Message body


Usage
-----

One `http_parser` object is used per TCP connection. Initialize the struct
using `http_parser_init()` and set the callbacks. That might look something
like this for a request parser:

```c
http_parser_settings settings;
settings.on_url = my_url_callback;
settings.on_header_field = my_header_field_callback;
/* ... */

http_parser *parser = malloc(sizeof(http_parser));
http_parser_init(parser, HTTP_REQUEST);
parser->data = my_socket;
```

When data is received on the socket, execute the parser and check for errors.

```c
size_t len = 80*1024, nparsed;
char buf[len];
ssize_t recved;

recved = recv(fd, buf, len, 0);

if (recved < 0) {
  /* Handle error. */
}

/* Start up / continue the parser.
 * Note we pass recved==0 to signal that EOF has been received.
 */
nparsed = http_parser_execute(parser, &settings, buf, recved);

if (parser->upgrade) {
  /* handle new protocol */
} else if (nparsed != recved) {
  /* Handle error. Usually just close the connection. */
}
```

`http_parser` needs to know where the end of the stream is. For example, sometimes
servers send responses without Content-Length and expect the client to
consume input (for the body) until EOF. To tell `http_parser` about EOF, give
`0` as the fourth parameter to `http_parser_execute()`. Callbacks and errors
can still be encountered during an EOF, so one must still be prepared
to receive them.
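
As a concrete illustration of that EOF handoff (an editor's sketch, not part of the original README; it reuses the hypothetical `parser`, `settings`, and `buf` variables from the usage example above):

```c
/* The peer closed the connection: feed a zero-length chunk so messages
 * that are terminated by EOF (no Content-Length) can be finished. */
http_parser_execute(parser, &settings, buf, 0);

/* Errors can still surface here, e.g. a message that cannot legally end
 * at EOF; HTTP_PARSER_ERRNO() and http_errno_name() are provided by
 * http_parser.h. */
if (HTTP_PARSER_ERRNO(parser) != HPE_OK) {
  fprintf(stderr, "parse error: %s\n",
          http_errno_name(HTTP_PARSER_ERRNO(parser)));
}
```
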
Scalar valued message information such as `status_code`, `method`, and the
HTTP version are stored in the parser structure. This data is only
temporarily stored in `http_parser` and gets reset on each new message. If
this information is needed later, copy it out of the structure during the
`headers_complete` callback.

The parser decodes the transfer-encoding for both requests and responses
transparently. That is, a chunked encoding is decoded before being sent to
the on_body callback.


The Special Problem of Upgrade
------------------------------

`http_parser` supports upgrading the connection to a different protocol. An
increasingly common example of this is the WebSocket protocol, which sends
a request like

        GET /demo HTTP/1.1
        Upgrade: WebSocket
        Connection: Upgrade
        Host: example.com
        Origin: http://example.com
        WebSocket-Protocol: sample

followed by non-HTTP data.

(See [RFC6455](https://tools.ietf.org/html/rfc6455) for more information on the
WebSocket protocol.)

To support this, the parser will treat this as a normal HTTP message without a
body, issuing both on_headers_complete and on_message_complete callbacks. However
http_parser_execute() will stop parsing at the end of the headers and return.

The user is expected to check if `parser->upgrade` has been set to 1 after
`http_parser_execute()` returns. Non-HTTP data begins in the supplied buffer,
at the offset given by the return value of `http_parser_execute()`.
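
To make that handoff concrete, a short editor's sketch (assuming the `buf` and `recved` variables from the usage example; `deliver_to_next_protocol()` is a hypothetical application function, not an http-parser API):

```c
size_t nparsed = http_parser_execute(parser, &settings, buf, (size_t)recved);

if (parser->upgrade) {
  /* The headers were consumed; everything past nparsed belongs to the
   * new protocol (e.g. WebSocket frames), not to http_parser. */
  deliver_to_next_protocol(buf + nparsed, (size_t)recved - nparsed);
} else if (nparsed != (size_t)recved) {
  /* Parse error: usually just close the connection. */
}
```
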

Callbacks
---------

During the `http_parser_execute()` call, the callbacks set in
`http_parser_settings` will be executed. The parser maintains state and
never looks behind, so buffering the data is not necessary. If you need to
save certain data for later usage, you can do that from the callbacks.

There are two types of callbacks:

* notification `typedef int (*http_cb) (http_parser*);`
    Callbacks: on_message_begin, on_headers_complete, on_message_complete.
* data `typedef int (*http_data_cb) (http_parser*, const char *at, size_t length);`
    Callbacks: (requests only) on_url,
               (common) on_header_field, on_header_value, on_body;

Callbacks must return 0 on success. Returning a non-zero value indicates
an error to the parser, making it exit immediately.

For cases where it is necessary to pass local information to/from a callback,
the `http_parser` object's `data` field can be used.
An example of such a case is when using threads to handle a socket connection,
parse a request, and then give a response over that socket. By instantiating
a thread-local struct containing relevant data (e.g. accepted socket,
allocated memory for callbacks to write into, etc.), a parser's callbacks are
able to communicate data between the scope of the thread and the scope of the
callback in a threadsafe manner. This allows `http_parser` to be used in
multi-threaded contexts.

Example:
```c
typedef struct {
  socket_t sock;
  void* buffer;
  int buf_len;
} custom_data_t;


int my_url_callback(http_parser* parser, const char *at, size_t length) {
  /* access to thread-local custom_data_t struct.
     Use this access to save parsed data for later use in a thread-local
     buffer, or to communicate over the socket
  */
  parser->data;
  ...
  return 0;
}

...

void http_parser_thread(socket_t sock) {
  int nparsed = 0;
  /* allocate memory for user data */
  custom_data_t *my_data = malloc(sizeof(custom_data_t));

  /* some information for use by callbacks.
   * achieves thread -> callback information flow */
  my_data->sock = sock;

  /* instantiate a thread-local parser */
  http_parser *parser = malloc(sizeof(http_parser));
  http_parser_init(parser, HTTP_REQUEST); /* initialise parser */
  /* this custom data reference is accessible through the reference to the
     parser supplied to callback functions */
  parser->data = my_data;

  http_parser_settings settings; /* set up callbacks */
  settings.on_url = my_url_callback;

  /* execute parser */
  nparsed = http_parser_execute(parser, &settings, buf, recved);

  ...
  /* parsed information copied from callback.
     can now perform action on data copied into thread-local memory from callbacks.
     achieves callback -> thread information flow */
  my_data->buffer;
  ...
}
```

In case you parse an HTTP message in chunks (i.e. `read()` request line
from socket, parse, read half headers, parse, etc.) your data callbacks
may be called more than once. `http_parser` guarantees that the data pointer is
only valid for the lifetime of the callback. You can also `read()` into a heap
allocated buffer to avoid copying memory around if this fits your application.

Reading headers may be a tricky task if you read/parse headers partially.
Basically, you need to remember whether the last header callback was a field or
a value and apply the following logic:

    (on_header_field and on_header_value shortened to on_h_*)
     ------------------------ ------------ --------------------------------------------
    | State (prev. callback) | Callback   | Description/action                         |
     ------------------------ ------------ --------------------------------------------
    | nothing (first call)   | on_h_field | Allocate new buffer and copy callback data |
    |                        |            | into it                                    |
     ------------------------ ------------ --------------------------------------------
    | value                  | on_h_field | New header started.                        |
    |                        |            | Copy current name,value buffers to headers |
    |                        |            | list and allocate new buffer for new name  |
     ------------------------ ------------ --------------------------------------------
    | field                  | on_h_field | Previous name continues. Reallocate name   |
    |                        |            | buffer and append callback data to it      |
     ------------------------ ------------ --------------------------------------------
    | field                  | on_h_value | Value for current header started. Allocate |
    |                        |            | new buffer and copy callback data to it    |
     ------------------------ ------------ --------------------------------------------
    | value                  | on_h_value | Value continues. Reallocate value buffer   |
    |                        |            | and append callback data to it             |
     ------------------------ ------------ --------------------------------------------
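
The table above boils down to a small amount of state. The following is an editor's sketch of that logic (not from the original sources): fixed-size buffers and a hypothetical `emit_header()` sink keep it short, where a real implementation would reallocate and bounds-check as the table describes.

```c
#include <string.h>
#include "http_parser.h"

/* Hypothetical application sink for one finished name/value pair. */
void emit_header(const char *name, size_t nlen, const char *val, size_t vlen);

enum last_cb { CB_NONE, CB_FIELD, CB_VALUE };

struct hdr_state {
  enum last_cb last;
  char name[256];
  size_t name_len;
  char value[4096];
  size_t value_len;
};

static int on_h_field(http_parser *p, const char *at, size_t len) {
  struct hdr_state *st = p->data;
  if (st->last == CB_VALUE) {
    /* value -> on_h_field: the previous header is complete; flush it
     * and start collecting a new name. */
    emit_header(st->name, st->name_len, st->value, st->value_len);
    st->name_len = 0;
    st->value_len = 0;
  }
  /* nothing/field -> on_h_field: (continue to) accumulate the name. */
  memcpy(st->name + st->name_len, at, len);  /* bounds checks omitted */
  st->name_len += len;
  st->last = CB_FIELD;
  return 0;
}

static int on_h_value(http_parser *p, const char *at, size_t len) {
  struct hdr_state *st = p->data;
  /* field -> on_h_value (value starts) and value -> on_h_value (value
   * continues) are both handled by appending. */
  memcpy(st->value + st->value_len, at, len);  /* bounds checks omitted */
  st->value_len += len;
  st->last = CB_VALUE;
  return 0;
}
```

Note that the final header pair still has to be flushed from `on_headers_complete`, since no trailing `on_header_field` call will arrive for it.
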

Parsing URLs
------------

A simplistic zero-copy URL parser is provided as `http_parser_parse_url()`.
Users of this library may wish to use it to parse URLs constructed from
consecutive `on_url` callbacks.
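
A minimal usage sketch of that API (an editor's illustration; `http_parser_url_init()` exists in recent http-parser releases, on older ones `memset` the struct to zero):

```c
#include <stdio.h>
#include <string.h>
#include "http_parser.h"

int main(void) {
  const char *url = "http://example.com:8080/demo?x=1";
  struct http_parser_url u;

  http_parser_url_init(&u);
  if (http_parser_parse_url(url, strlen(url), 0, &u) != 0)
    return 1;  /* malformed URL */

  /* field_data holds offsets/lengths into the original buffer: zero-copy. */
  if (u.field_set & (1 << UF_PATH))
    printf("path = %.*s\n", (int)u.field_data[UF_PATH].len,
           url + u.field_data[UF_PATH].off);
  if (u.field_set & (1 << UF_PORT))
    printf("port = %u\n", u.port);
  return 0;
}
```
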

See examples of reading in headers:

* [partial example](http://gist.github.com/155877) in C
* [from http-parser tests](http://github.com/joyent/http-parser/blob/37a0ff8/test.c#L403) in C
* [from Node library](http://github.com/joyent/node/blob/842eaf4/src/http.js#L284) in Javascript
@@ -1,128 +0,0 @@
/* Copyright Fedor Indutny. All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
#include "http_parser.h"
#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <sys/time.h>

/* 8 gb */
static const int64_t kBytes = 8LL << 30;

static const char data[] =
    "POST /joyent/http-parser HTTP/1.1\r\n"
    "Host: github.com\r\n"
    "DNT: 1\r\n"
    "Accept-Encoding: gzip, deflate, sdch\r\n"
    "Accept-Language: ru-RU,ru;q=0.8,en-US;q=0.6,en;q=0.4\r\n"
    "User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) "
        "AppleWebKit/537.36 (KHTML, like Gecko) "
        "Chrome/39.0.2171.65 Safari/537.36\r\n"
    "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,"
        "image/webp,*/*;q=0.8\r\n"
    "Referer: https://github.com/joyent/http-parser\r\n"
    "Connection: keep-alive\r\n"
    "Transfer-Encoding: chunked\r\n"
    "Cache-Control: max-age=0\r\n\r\nb\r\nhello world\r\n0\r\n";
static const size_t data_len = sizeof(data) - 1;

static int on_info(http_parser* p) {
  return 0;
}


static int on_data(http_parser* p, const char *at, size_t length) {
  return 0;
}

static http_parser_settings settings = {
  .on_message_begin = on_info,
  .on_headers_complete = on_info,
  .on_message_complete = on_info,
  .on_header_field = on_data,
  .on_header_value = on_data,
  .on_url = on_data,
  .on_status = on_data,
  .on_body = on_data
};

int bench(int iter_count, int silent) {
  struct http_parser parser;
  int i;
  int err;
  struct timeval start;
  struct timeval end;

  if (!silent) {
    err = gettimeofday(&start, NULL);
    assert(err == 0);
  }

  fprintf(stderr, "req_len=%d\n", (int) data_len);
  for (i = 0; i < iter_count; i++) {
    size_t parsed;
    http_parser_init(&parser, HTTP_REQUEST);

    parsed = http_parser_execute(&parser, &settings, data, data_len);
    assert(parsed == data_len);
  }

  if (!silent) {
    double elapsed;
    double bw;
    double total;

    err = gettimeofday(&end, NULL);
    assert(err == 0);

    fprintf(stdout, "Benchmark result:\n");

    elapsed = (double) (end.tv_sec - start.tv_sec) +
              (end.tv_usec - start.tv_usec) * 1e-6f;

    total = (double) iter_count * data_len;
    bw = (double) total / elapsed;

    fprintf(stdout, "%.2f mb | %.2f mb/s | %.2f req/sec | %.2f s\n",
            (double) total / (1024 * 1024),
            bw / (1024 * 1024),
            (double) iter_count / elapsed,
            elapsed);

    fflush(stdout);
  }

  return 0;
}

int main(int argc, char** argv) {
  int64_t iterations;

  iterations = kBytes / (int64_t) data_len;
  if (argc == 2 && strcmp(argv[1], "infinite") == 0) {
    for (;;)
      bench(iterations, 1);
    return 0;
  } else {
    return bench(iterations, 0);
  }
}